diff --git a/.gitattributes b/.gitattributes index c7d9f3332a950355d5a77d85000f05e6f45435ea..c673bd5c4941796de98e3332dd25aba75cf6b67b 100644 --- a/.gitattributes +++ b/.gitattributes @@ -32,3 +32,10 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text *.zip filter=lfs diff=lfs merge=lfs -text *.zst filter=lfs diff=lfs merge=lfs -text *tfevents* filter=lfs diff=lfs merge=lfs -text +demo_audio/rich_short.wav filter=lfs diff=lfs merge=lfs -text +demo_audio/rich.wav filter=lfs diff=lfs merge=lfs -text +demo_audio/song.wav filter=lfs diff=lfs merge=lfs -text +demo/rich/rich.mp4 filter=lfs diff=lfs merge=lfs -text +demo/song/song.mp4 filter=lfs diff=lfs merge=lfs -text +demo/style/diversity.mp4 filter=lfs diff=lfs merge=lfs -text +visualise/teaser_01.png filter=lfs diff=lfs merge=lfs -text diff --git a/README.md b/README.md index 1618a9a17280c73f8c0398d6a334853067d32da7..b67714931e475ca7a9ca091ede344864627d95a6 100644 --- a/README.md +++ b/README.md @@ -1,8 +1,8 @@ --- title: TalkSHOW -emoji: 🏃 +emoji: 🌍 colorFrom: pink -colorTo: green +colorTo: red sdk: gradio sdk_version: 3.23.0 app_file: app.py diff --git a/__init__.py b/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/app.py b/app.py new file mode 100644 index 0000000000000000000000000000000000000000..d18cd6c325aa663861f6eeadc8614fd5dac7a7b9 --- /dev/null +++ b/app.py @@ -0,0 +1,282 @@ +import gradio as gr +import os +import sys +sys.path.append(os.getcwd()) +os.system(r"cd mesh-master") +os.system(r"make all") +os.system(r"cd ..") + +from transformers import Wav2Vec2Processor + +import numpy as np +import json +import smplx as smpl + +from nets import * +from trainer.options import parse_args +from data_utils import torch_data +from trainer.config import load_JsonConfig + +import torch +import torch.nn as nn +import torch.nn.functional as F +from torch.utils import data +from data_utils.rotation_conversion import rotation_6d_to_matrix, matrix_to_axis_angle +from data_utils.lower_body import part2full, pred2poses, poses2pred, poses2poses +from visualise.rendering import RenderTool + +global device +device = 'cpu' + + +def init_model(model_name, model_path, args, config): + if model_name == 's2g_face': + generator = s2g_face( + args, + config, + ) + elif model_name == 's2g_body_vq': + generator = s2g_body_vq( + args, + config, + ) + elif model_name == 's2g_body_pixel': + generator = s2g_body_pixel( + args, + config, + ) + elif model_name == 's2g_LS3DCG': + generator = LS3DCG( + args, + config, + ) + else: + raise NotImplementedError + + model_ckpt = torch.load(model_path, map_location=torch.device('cpu')) + if model_name == 'smplx_S2G': + generator.generator.load_state_dict(model_ckpt['generator']['generator']) + + elif 'generator' in list(model_ckpt.keys()): + generator.load_state_dict(model_ckpt['generator']) + else: + model_ckpt = {'generator': model_ckpt} + generator.load_state_dict(model_ckpt) + + return generator + + +def get_vertices(smplx_model, betas, result_list, exp, require_pose=False): + vertices_list = [] + poses_list = [] + expression = torch.zeros([1, 100]) + + for i in result_list: + vertices = [] + poses = [] + for j in range(i.shape[0]): + output = smplx_model(betas=betas, + expression=i[j][165:265].unsqueeze_(dim=0) if exp else expression, + jaw_pose=i[j][0:3].unsqueeze_(dim=0), + leye_pose=i[j][3:6].unsqueeze_(dim=0), + reye_pose=i[j][6:9].unsqueeze_(dim=0), + global_orient=i[j][9:12].unsqueeze_(dim=0), + 
body_pose=i[j][12:75].unsqueeze_(dim=0), + left_hand_pose=i[j][75:120].unsqueeze_(dim=0), + right_hand_pose=i[j][120:165].unsqueeze_(dim=0), + return_verts=True) + vertices.append(output.vertices.detach().cpu().numpy().squeeze()) + # pose = torch.cat([output.body_pose, output.left_hand_pose, output.right_hand_pose], dim=1) + pose = output.body_pose + poses.append(pose.detach().cpu()) + vertices = np.asarray(vertices) + vertices_list.append(vertices) + poses = torch.cat(poses, dim=0) + poses_list.append(poses) + if require_pose: + return vertices_list, poses_list + else: + return vertices_list, None + + +global_orient = torch.tensor([3.0747, -0.0158, -0.0152]) + +parser = parse_args() +args = parser.parse_args() + +RUN_MODE = "local" +if RUN_MODE != "local": + os.system("wget -P experiments/2022-10-15-smplx_S2G-face-3d/ " + "https://huggingface.co/feifeifeiliu/TalkSHOW/resolve/main/2022-10-15-smplx_S2G-face-3d/ckpt-99.pth") + os.system("wget -P experiments/2022-10-31-smplx_S2G-body-vq-3d/ " + "https://huggingface.co/feifeifeiliu/TalkSHOW/resolve/main/2022-10-31-smplx_S2G-body-vq-3d/ckpt-99.pth") + os.system("wget -P experiments/2022-11-02-smplx_S2G-body-pixel-3d/ " + "https://huggingface.co/feifeifeiliu/TalkSHOW/resolve/main/2022-11-02-smplx_S2G-body-pixel-3d/ckpt-99.pth") + os.system("wget -P visualise/smplx/ " + "https://huggingface.co/feifeifeiliu/TalkSHOW/resolve/main/smplx/SMPLX_NEUTRAL.npz") + +config = load_JsonConfig("config/body_pixel.json") + +face_model_name = args.face_model_name +face_model_path = args.face_model_path +body_model_name = args.body_model_name +body_model_path = args.body_model_path +smplx_path = './visualise/' + +os.environ['smplx_npz_path'] = config.smplx_npz_path +os.environ['extra_joint_path'] = config.extra_joint_path +os.environ['j14_regressor_path'] = config.j14_regressor_path + +print('init model...') +g_body = init_model(body_model_name, body_model_path, args, config) +generator2 = None +g_face = init_model(face_model_name, face_model_path, args, config) + +print('init smlpx model...') +dtype = torch.float64 +model_params = dict(model_path=smplx_path, + model_type='smplx', + create_global_orient=True, + create_body_pose=True, + create_betas=True, + num_betas=300, + create_left_hand_pose=True, + create_right_hand_pose=True, + use_pca=False, + flat_hand_mean=False, + create_expression=True, + num_expression_coeffs=100, + num_pca_comps=12, + create_jaw_pose=True, + create_leye_pose=True, + create_reye_pose=True, + create_transl=False, + # gender='ne', + dtype=dtype, ) +smplx_model = smpl.create(**model_params).to(device) +print('init rendertool...') +rendertool = RenderTool('visualise/video/' + config.Log.name) + + +def infer(wav, identity, pose): + betas = torch.zeros([1, 300], dtype=torch.float64).to(device) + am = Wav2Vec2Processor.from_pretrained("vitouphy/wav2vec2-xls-r-300m-phoneme") + am_sr = 16000 + num_sample = args.num_sample + cur_wav_file = wav + + if pose == 'Stand': + stand = True + face = False + elif pose == 'Sit': + stand = False + face = False + else: + stand = False + face = True + + if face: + body_static = torch.zeros([1, 162], device=device) + body_static[:, 6:9] = torch.tensor([3.0747, -0.0158, -0.0152]).reshape(1, 3).repeat(body_static.shape[0], 1) + + if identity == 'Oliver': + id = 0 + elif identity == 'Chemistry': + id = 1 + elif identity == 'Seth': + id = 2 + elif identity == 'Conan': + id = 3 + + result_list = [] + + pred_face = g_face.infer_on_audio(cur_wav_file, + initial_pose=None, + norm_stats=None, + w_pre=False, + # id=id, 
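+                                      # am / am_sr: the Wav2Vec2 phoneme processor loaded above
+                                      # ("vitouphy/wav2vec2-xls-r-300m-phoneme") and its 16 kHz sample
+                                      # rate, passed so the face generator can derive its audio
+                                      # features from cur_wav_file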
+ frame=None, + am=am, + am_sr=am_sr + ) + pred_face = torch.tensor(pred_face).squeeze().to(device) + # pred_face = torch.zeros([gt.shape[0], 105]) + + if config.Data.pose.convert_to_6d: + pred_jaw = pred_face[:, :6].reshape(pred_face.shape[0], -1, 6) + pred_jaw = matrix_to_axis_angle(rotation_6d_to_matrix(pred_jaw)).reshape(pred_face.shape[0], -1) + pred_face = pred_face[:, 6:] + else: + pred_jaw = pred_face[:, :3] + pred_face = pred_face[:, 3:] + + id = torch.tensor([id], device=device) + + for i in range(num_sample): + pred_res = g_body.infer_on_audio(cur_wav_file, + initial_pose=None, + norm_stats=None, + txgfile=None, + id=id, + var=None, + fps=30, + w_pre=False + ) + pred = torch.tensor(pred_res).squeeze().to(device) + + if pred.shape[0] < pred_face.shape[0]: + repeat_frame = pred[-1].unsqueeze(dim=0).repeat(pred_face.shape[0] - pred.shape[0], 1) + pred = torch.cat([pred, repeat_frame], dim=0) + else: + pred = pred[:pred_face.shape[0], :] + + body_or_face = False + if pred.shape[1] < 275: + body_or_face = True + if config.Data.pose.convert_to_6d: + pred = pred.reshape(pred.shape[0], -1, 6) + pred = matrix_to_axis_angle(rotation_6d_to_matrix(pred)) + pred = pred.reshape(pred.shape[0], -1) + + if config.Model.model_name == 's2g_LS3DCG': + pred = torch.cat([pred[:, :3], pred[:, 103:], pred[:, 3:103]], dim=-1) + else: + pred = torch.cat([pred_jaw, pred, pred_face], dim=-1) + + # pred[:, 9:12] = global_orient + pred = part2full(pred, stand) + if face: + pred = torch.cat([pred[:, :3], body_static.repeat(pred.shape[0], 1), pred[:, -100:]], dim=-1) + # result_list[0] = poses2pred(result_list[0], stand) + # if gt_0 is None: + # gt_0 = gt + # pred = pred2poses(pred, gt_0) + # result_list[0] = poses2poses(result_list[0], gt_0) + + result_list.append(pred) + + + vertices_list, _ = get_vertices(smplx_model, betas, result_list, config.Data.pose.expression) + + result_list = [res.to('cpu') for res in result_list] + dict = np.concatenate(result_list[:], axis=0) + + rendertool._render_sequences(cur_wav_file, vertices_list, stand=stand, face=face, whole_body=args.whole_body) + return "result.mp4" + +def main(): + + iface = gr.Interface(fn=infer, inputs=["audio", + gr.Radio(["Oliver", "Chemistry", "Seth", "Conan"]), + gr.Radio(["Stand", "Sit", "Only Face"]), + ], + outputs="video", + examples=[[os.path.join(os.path.dirname(__file__), "demo_audio/style.wav"), "Oliver", "Sit"]]) + iface.launch(debug=True) + + +if __name__ == '__main__': + main() + + + diff --git a/config/LS3DCG.json b/config/LS3DCG.json new file mode 100644 index 0000000000000000000000000000000000000000..d25a862cf7ffbdd1d6e45fdd476cb7b02767e58c --- /dev/null +++ b/config/LS3DCG.json @@ -0,0 +1,60 @@ +{ + "config_root_path": "/is/cluster/scratch/hyi/ExpressiveBody/SMPLifyX4/scripts", + "dataset_load_mode": "pickle", + "store_file_path": "store.pkl", + "smplx_npz_path": "visualise/smplx_model/SMPLX_NEUTRAL_2020.npz", + "extra_joint_path": "visualise/smplx_model/smplx_extra_joints.yaml", + "j14_regressor_path": "visualise/smplx_model/SMPLX_to_J14.pkl", + "param": { + "w_j": 1, + "w_b": 1, + "w_h": 1 + }, + "Data": { + "data_root": "../ExpressiveWholeBodyDatasetv1.0/", + "pklname": "_3d_mfcc.pkl", + "whole_video": false, + "pose": { + "normalization": false, + "convert_to_6d": false, + "norm_method": "all", + "augmentation": false, + "generate_length": 88, + "pre_pose_length": 0, + "pose_dim": 99, + "expression": true + }, + "aud": { + "feat_method": "mfcc", + "aud_feat_dim": 64, + "aud_feat_win_size": null, + "context_info": false + } + }, 
+ "Model": { + "model_type": "body", + "model_name": "s2g_LS3DCG", + "code_num": 2048, + "AudioOpt": "Adam", + "encoder_choice": "mfcc", + "gan": false, + }, + "DataLoader": { + "batch_size": 128, + "num_workers": 0 + }, + "Train": { + "epochs": 100, + "max_gradient_norm": 5, + "learning_rate": { + "generator_learning_rate": 1e-4, + "discriminator_learning_rate": 1e-4 + } + }, + "Log": { + "save_every": 50, + "print_every": 200, + "name": "LS3DCG" + } +} + \ No newline at end of file diff --git a/config/body_pixel.json b/config/body_pixel.json new file mode 100644 index 0000000000000000000000000000000000000000..67308c1f10cb41ea94f615abff403902e1a29ee1 --- /dev/null +++ b/config/body_pixel.json @@ -0,0 +1,63 @@ +{ + "config_root_path": "/is/cluster/scratch/hyi/ExpressiveBody/SMPLifyX4/scripts", + "dataset_load_mode": "pickle", + "store_file_path": "store.pkl", + "smplx_npz_path": "visualise/smplx_model/SMPLX_NEUTRAL_2020.npz", + "extra_joint_path": "visualise/smplx_model/smplx_extra_joints.yaml", + "j14_regressor_path": "visualise/smplx_model/SMPLX_to_J14.pkl", + "param": { + "w_j": 1, + "w_b": 1, + "w_h": 1 + }, + "Data": { + "data_root": "../ExpressiveWholeBodyDatasetv1.0/", + "pklname": "_3d_mfcc.pkl", + "whole_video": false, + "pose": { + "normalization": false, + "convert_to_6d": false, + "norm_method": "all", + "augmentation": false, + "generate_length": 88, + "pre_pose_length": 0, + "pose_dim": 99, + "expression": true + }, + "aud": { + "feat_method": "mfcc", + "aud_feat_dim": 64, + "aud_feat_win_size": null, + "context_info": false + } + }, + "Model": { + "model_type": "body", + "model_name": "s2g_body_pixel", + "composition": true, + "code_num": 2048, + "bh_model": true, + "AudioOpt": "Adam", + "encoder_choice": "mfcc", + "gan": false, + "vq_path": "./experiments/2022-10-31-smplx_S2G-body-vq-3d/ckpt-99.pth" + }, + "DataLoader": { + "batch_size": 128, + "num_workers": 0 + }, + "Train": { + "epochs": 100, + "max_gradient_norm": 5, + "learning_rate": { + "generator_learning_rate": 1e-4, + "discriminator_learning_rate": 1e-4 + } + }, + "Log": { + "save_every": 50, + "print_every": 200, + "name": "body-pixel2" + } +} + \ No newline at end of file diff --git a/config/body_vq.json b/config/body_vq.json new file mode 100644 index 0000000000000000000000000000000000000000..df2bd0106f5243b0479ba17f71325faeaa645f80 --- /dev/null +++ b/config/body_vq.json @@ -0,0 +1,62 @@ +{ + "config_root_path": "/is/cluster/scratch/hyi/ExpressiveBody/SMPLifyX4/scripts", + "dataset_load_mode": "pickle", + "store_file_path": "store.pkl", + "smplx_npz_path": "visualise/smplx_model/SMPLX_NEUTRAL_2020.npz", + "extra_joint_path": "visualise/smplx_model/smplx_extra_joints.yaml", + "j14_regressor_path": "visualise/smplx_model/SMPLX_to_J14.pkl", + "param": { + "w_j": 1, + "w_b": 1, + "w_h": 1 + }, + "Data": { + "data_root": "../expressive_body-V0.7/", + "pklname": "_3d_mfcc.pkl", + "whole_video": false, + "pose": { + "normalization": false, + "convert_to_6d": false, + "norm_method": "all", + "augmentation": false, + "generate_length": 88, + "pre_pose_length": 0, + "pose_dim": 99, + "expression": true + }, + "aud": { + "feat_method": "mfcc", + "aud_feat_dim": 64, + "aud_feat_win_size": null, + "context_info": false + } + }, + "Model": { + "model_type": "body", + "model_name": "s2g_body_vq", + "composition": false, + "code_num": 2048, + "bh_model": true, + "AudioOpt": "Adam", + "encoder_choice": "mfcc", + "gan": false + }, + "DataLoader": { + "batch_size": 128, + "num_workers": 0 + }, + "Train": { + "epochs": 100, + 
"max_gradient_norm": 5, + "learning_rate": { + "generator_learning_rate": 1e-4, + "discriminator_learning_rate": 1e-4 + } + }, + "Log": { + "save_every": 50, + "print_every": 200, + "name": "test" + } +} + \ No newline at end of file diff --git a/config/face.json b/config/face.json new file mode 100644 index 0000000000000000000000000000000000000000..39e6d18bb863853408d9a69388235e29c51e1dd7 --- /dev/null +++ b/config/face.json @@ -0,0 +1,59 @@ +{ + "config_root_path": "/is/cluster/scratch/hyi/ExpressiveBody/SMPLifyX4/scripts", + "dataset_load_mode": "json", + "store_file_path": "store.pkl", + "smplx_npz_path": "visualise/smplx_model/SMPLX_NEUTRAL_2020.npz", + "extra_joint_path": "visualise/smplx_model/smplx_extra_joints.yaml", + "j14_regressor_path": "visualise/smplx_model/SMPLX_to_J14.pkl", + "param": { + "w_j": 1, + "w_b": 1, + "w_h": 1 + }, + "Data": { + "data_root": "../ExpressiveWholeBodyDatasetv1.0/", + "pklname": "_3d_wv2.pkl", + "whole_video": true, + "pose": { + "normalization": false, + "convert_to_6d": false, + "norm_method": "all", + "augmentation": false, + "generate_length": 88, + "pre_pose_length": 0, + "pose_dim": 99, + "expression": true + }, + "aud": { + "feat_method": "mfcc", + "aud_feat_dim": 64, + "aud_feat_win_size": null, + "context_info": false + } + }, + "Model": { + "model_type": "face", + "model_name": "s2g_face", + "AudioOpt": "SGD", + "encoder_choice": "faceformer", + "gan": false + }, + "DataLoader": { + "batch_size": 1, + "num_workers": 0 + }, + "Train": { + "epochs": 100, + "max_gradient_norm": 5, + "learning_rate": { + "generator_learning_rate": 1e-4, + "discriminator_learning_rate": 1e-4 + } + }, + "Log": { + "save_every": 50, + "print_every": 1000, + "name": "face" + } +} + \ No newline at end of file diff --git a/data_utils/__init__.py b/data_utils/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..7c3cab8cad67c5e952924944f34319e2c6ff5985 --- /dev/null +++ b/data_utils/__init__.py @@ -0,0 +1,3 @@ +# from .dataloader_csv import MultiVidData as csv_data +from .dataloader_torch import MultiVidData as torch_data +from .utils import get_melspec, get_mfcc, get_mfcc_old, get_mfcc_psf, get_mfcc_psf_min, get_mfcc_ta \ No newline at end of file diff --git a/data_utils/__pycache__/__init__.cpython-37.pyc b/data_utils/__pycache__/__init__.cpython-37.pyc new file mode 100644 index 0000000000000000000000000000000000000000..af8bc565bd21df88e0dd49777e6c95b6ad963c69 Binary files /dev/null and b/data_utils/__pycache__/__init__.cpython-37.pyc differ diff --git a/data_utils/__pycache__/consts.cpython-37.pyc b/data_utils/__pycache__/consts.cpython-37.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0e9ef53023f72d605da950510b1b8bbe2a733576 Binary files /dev/null and b/data_utils/__pycache__/consts.cpython-37.pyc differ diff --git a/data_utils/__pycache__/dataloader_torch.cpython-37.pyc b/data_utils/__pycache__/dataloader_torch.cpython-37.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2de85426ba6129c04f66b4f9992b56e1076e44c3 Binary files /dev/null and b/data_utils/__pycache__/dataloader_torch.cpython-37.pyc differ diff --git a/data_utils/__pycache__/lower_body.cpython-37.pyc b/data_utils/__pycache__/lower_body.cpython-37.pyc new file mode 100644 index 0000000000000000000000000000000000000000..48997dca34b9e594ca94ed3de329a4c8863d3cae Binary files /dev/null and b/data_utils/__pycache__/lower_body.cpython-37.pyc differ diff --git a/data_utils/__pycache__/mesh_dataset.cpython-37.pyc 
b/data_utils/__pycache__/mesh_dataset.cpython-37.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..20ace67b76abda4406dd8aae3517a0b89927824d
Binary files /dev/null and b/data_utils/__pycache__/mesh_dataset.cpython-37.pyc differ
diff --git a/data_utils/__pycache__/rotation_conversion.cpython-37.pyc b/data_utils/__pycache__/rotation_conversion.cpython-37.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..e00ee73ac6a8710aa1090f41d5e7d465655f42d6
Binary files /dev/null and b/data_utils/__pycache__/rotation_conversion.cpython-37.pyc differ
diff --git a/data_utils/__pycache__/utils.cpython-37.pyc b/data_utils/__pycache__/utils.cpython-37.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..a9b372d6f540b55122f2620db7cf1a5125a0f0fe
Binary files /dev/null and b/data_utils/__pycache__/utils.cpython-37.pyc differ
diff --git a/data_utils/axis2matrix.py b/data_utils/axis2matrix.py
new file mode 100644
index 0000000000000000000000000000000000000000..56c375a1b11b0bbe81ce16841f4f09961d25ba7e
--- /dev/null
+++ b/data_utils/axis2matrix.py
@@ -0,0 +1,29 @@
+import numpy as np
+import math
+import scipy.linalg as linalg
+
+
+def rotate_mat(axis, radian):
+
+    a = np.cross(np.eye(3), axis / linalg.norm(axis) * radian)
+
+    rot_matrix = linalg.expm(a)
+
+    return rot_matrix
+
+def aaa2mat(axis, sin, cos):
+    i = np.eye(3)
+    nnt = np.dot(axis.T, axis)
+    s = np.asarray([[0, -axis[0,2], axis[0,1]],
+                    [axis[0,2], 0, -axis[0,0]],
+                    [-axis[0,1], axis[0,0], 0]])
+    r = cos * i + (1-cos)*nnt +sin * s
+    return r
+
+rand_axis = np.asarray([[1,0,0]])
+# rotation angle
+r = math.pi/2
+# compute the rotation matrix
+rot_matrix = rotate_mat(rand_axis, r)
+r2 = aaa2mat(rand_axis, np.sin(r), np.cos(r))
+print(rot_matrix)
\ No newline at end of file
diff --git a/data_utils/consts.py b/data_utils/consts.py
new file mode 100644
index 0000000000000000000000000000000000000000..70406b9d458588030508ca656492d274f48cbf3f
--- /dev/null
+++ b/data_utils/consts.py
@@ -0,0 +1,1878 @@
+import numpy as np
+
+speaker_id = {
+    'Amel_Karboul': 0,
+    'Bill_Gates': 1,
+    'Christina_Wallace': 2,
+    'Dan_Ariely': 3,
+    'daniel_susskind': 4,
+    'Dena_Simmons': 5,
+    'Enric_Sala': 6,
+    'FeiFei_Li': 7,
+    'GabeBarcia_Colombo': 8,
+    'Kelly_Richmond_Pope': 9,
+    'Keller_Rinaudo': 10,
+    'Laurel_Braitman': 11,
+    'Lisa_Feldman_Barrett': 12,
+    'molly_winter': 13,
+    'Sara_DeWitt': 14,
+    'Seema_Bansal': 15,
+    'Stacy_Smith': 16,
+    'Stanley_McChrystal': 17,
+    'Vicki_Arroyo': 18,
+    'speeker_oliver': 19,
+    'oliver': 20,
+    'chemistry': 21,
+    'seth': 22,
+    'conan': 23,
+}
+
+checker_stats={
+    'angelica': {
+        "finger_distance": 0.8,
+        "finger_angle": 0.2,
+        "finger_offset": [0.9, 0.9],
+        "finger_position": [0.9, 0.9, 0.9, 0.9],
+        "finger_velocity": [[0.08, 0.08, 0.08, 0.08], [0.06, 0.06, 0.06, 0.06], [0.15, 0.15, 0.15, 0.15]]
+    },
+
+    'oliver': {
+        "finger_distance": 0.8,
+        "finger_angle": 0.2,
+        "finger_offset": [0.9, 0.9],
+        "finger_position": [0.9, 0.9, 0.9, 0.9],
+        "finger_velocity": [[0.08, 0.08, 0.08, 0.08], [0.06, 0.06, 0.06, 0.06], [0.15, 0.15, 0.15, 0.15]]
+    },
+
+    'seth': {
+        "finger_distance": 0.8,
+        "finger_angle": 0.2,
+        "finger_offset": [0.9, 0.9],
+        "finger_position": [0.9, 0.9, 0.9, 0.9],
+        "finger_velocity": [[0.08, 0.08, 0.08, 0.08], [0.06, 0.06, 0.06, 0.06], [0.15, 0.15, 0.15, 0.15]]
+    },
+
+    'shelly': {
+        "finger_distance": 0.8,
+        "finger_angle": 0.2,
+        "finger_offset": [0.9, 0.9],
+        "finger_position": [0.9, 0.9, 0.9, 0.9],
+        "finger_velocity": [[0.08, 0.08, 0.08, 0.08], [0.06, 0.06, 0.06, 0.06], [0.15, 0.15, 0.15, 0.15]]
+    },
+
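+    # The per-speaker entries below use looser thresholds than the four above
+    # (0.9 / 0.3 / 1.5 rather than 0.8 / 0.2 / 0.9) and additionally define
+    # "positive_thres" / "negative_thres" fields.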
+ 'Dan_Ariely': { #(38685, 54804) + "finger_distance": 0.9, + "finger_angle": 0.3, + "finger_offset": [1.5, 1.5], + "finger_position": [1.5, 1.5, 1.5, 1.5], + "finger_velocity": [[0.08, 0.08, 0.08, 0.08], [0.06, 0.06, 0.06, 0.06], [0.15, 0.15, 0.15, 0.15]], + "positive_thres": 3, + "negative_thres": 2 + }, + + 'Bill_Gates': { #(29734, 43630) + "finger_distance": 0.9, + "finger_angle": 0.3, + "finger_offset": [1.5, 1.5], + "finger_position": [1.5, 1.5, 1.5, 1.5], + "finger_velocity": [[0.08, 0.08, 0.08, 0.08], [0.06, 0.06, 0.06, 0.06], [0.15, 0.15, 0.15, 0.15]], + "positive_thres": 3, + "negative_thres": 2 + }, + + 'Amel_Karboul': { + "finger_distance": 0.9, + "finger_angle": 0.3, + "finger_offset": [1.5, 1.5], + "finger_position": [1.5, 1.5, 1.5, 1.5], + "finger_velocity": [[0.08, 0.08, 0.08, 0.08], [0.06, 0.06, 0.06, 0.06], [0.15, 0.15, 0.15, 0.15]], + "positive_thres": 3, + "negative_thres": 2 + }, + + 'Christina_Wallace': { + "finger_distance": 0.9, + "finger_angle": 0.3, + "finger_offset": [1.5, 1.5], + "finger_position": [1.5, 1.5, 1.5, 1.5], + "finger_velocity": [[0.08, 0.08, 0.08, 0.08], [0.06, 0.06, 0.06, 0.06], [0.15, 0.15, 0.15, 0.15]], + "positive_thres": 3, + "negative_thres": 2 + }, + + 'daniel_susskind': { #(21066, 54714) + "finger_distance": 0.9, + "finger_angle": 0.3, + "finger_offset": [1.5, 1.5], + "finger_position": [1.5, 1.5, 1.5, 1.5], + "finger_velocity": [[0.08, 0.08, 0.08, 0.08], [0.06, 0.06, 0.06, 0.06], [0.15, 0.15, 0.15, 0.15]], + "positive_thres": 2, + "negative_thres": 2 + }, + + 'Dena_Simmons': { + "finger_distance": 0.9, + "finger_angle": 0.3, + "finger_offset": [1.5, 1.5], + "finger_position": [1.5, 1.5, 1.5, 1.5], + "finger_velocity": [[0.08, 0.08, 0.08, 0.08], [0.06, 0.06, 0.06, 0.06], [0.15, 0.15, 0.15, 0.15]], + "positive_thres": 3, + "negative_thres": 2 + }, + + 'Enric_Sala': { + "finger_distance": 0.9, + "finger_angle": 0.3, + "finger_offset": [1.5, 1.5], + "finger_position": [1.5, 1.5, 1.5, 1.5], + "finger_velocity": [[0.08, 0.08, 0.08, 0.08], [0.06, 0.06, 0.06, 0.06], [0.15, 0.15, 0.15, 0.15]], + "positive_thres": 2, + "negative_thres": 2 + }, + + 'FeiFei_Li': { + "finger_distance": 0.9, + "finger_angle": 0.3, + "finger_offset": [1.5, 1.5], + "finger_position": [1.5, 1.5, 1.5, 1.5], + "finger_velocity": [[0.08, 0.08, 0.08, 0.08], [0.06, 0.06, 0.06, 0.06], [0.15, 0.15, 0.15, 0.15]], + "positive_thres": 2, + "negative_thres": -1 + }, + + 'GabeBarcia_Colombo': { + "finger_distance": 0.9, + "finger_angle": 0.3, + "finger_offset": [1.5, 1.5], + "finger_position": [1.5, 1.5, 1.5, 1.5], + "finger_velocity": [[0.08, 0.08, 0.08, 0.08], [0.06, 0.06, 0.06, 0.06], [0.15, 0.15, 0.15, 0.15]], + "positive_thres": 2, + "negative_thres": -1 + }, + + 'Keller_Rinaudo': { #(4011, 10623) + "finger_distance": 0.9, + "finger_angle": 0.3, + "finger_offset": [1.5, 1.5], + "finger_position": [1.5, 1.5, 1.5, 1.5], + "finger_velocity": [[0.08, 0.08, 0.08, 0.08], [0.06, 0.06, 0.06, 0.06], [0.15, 0.15, 0.15, 0.15]], + "positive_thres": 3, + "negative_thres": -1 + }, + + 'Kelly_Richmond_Pope': { + "finger_distance": 0.9, + "finger_angle": 0.3, + "finger_offset": [1.5, 1.5], + "finger_position": [1.5, 1.5, 1.5, 1.5], + "finger_velocity": [[0.08, 0.08, 0.08, 0.08], [0.06, 0.06, 0.06, 0.06], [0.15, 0.15, 0.15, 0.15]], + "positive_thres": 2, + "negative_thres": 2 + }, + + 'Laurel_Braitman': { + "finger_distance": 0.9, + "finger_angle": 0.3, + "finger_offset": [1.5, 1.5], + "finger_position": [1.5, 1.5, 1.5, 1.5], + "finger_velocity": [[0.08, 0.08, 0.08, 0.08], [0.06, 0.06, 0.06, 
0.06], [0.15, 0.15, 0.15, 0.15]], + "positive_thres": 3, + "negative_thres": 2 + }, + + 'Lisa_Feldman_Barrett': { + "finger_distance": 0.9, + "finger_angle": 0.3, + "finger_offset": [1.5, 1.5], + "finger_position": [1.5, 1.5, 1.5, 1.5], + "finger_velocity": [[0.08, 0.08, 0.08, 0.08], [0.06, 0.06, 0.06, 0.06], [0.15, 0.15, 0.15, 0.15]], + "positive_thres": 2, + "negative_thres": 2 + }, + + 'molly_winter': { + "finger_distance": 0.9, + "finger_angle": 0.3, + "finger_offset": [1.5, 1.5], + "finger_position": [1.5, 1.5, 1.5, 1.5], + "finger_velocity": [[0.08, 0.08, 0.08, 0.08], [0.06, 0.06, 0.06, 0.06], [0.15, 0.15, 0.15, 0.15]], + "positive_thres": 2, + "negative_thres": 2 + }, + + 'Sara_DeWitt': { + "finger_distance": 0.9, + "finger_angle": 0.3, + "finger_offset": [1.5, 1.5], + "finger_position": [1.5, 1.5, 1.5, 1.5], + "finger_velocity": [[0.08, 0.08, 0.08, 0.08], [0.06, 0.06, 0.06, 0.06], [0.15, 0.15, 0.15, 0.15]], + "positive_thres": 3, + "negative_thres": 2 + }, + + 'Seema_Bansal': { + "finger_distance": 0.9, + "finger_angle": 0.3, + "finger_offset": [1.5, 1.5], + "finger_position": [1.5, 1.5, 1.5, 1.5], + "finger_velocity": [[0.08, 0.08, 0.08, 0.08], [0.06, 0.06, 0.06, 0.06], [0.15, 0.15, 0.15, 0.15]], + "positive_thres": 3, + "negative_thres": 2 + }, + + 'Vicki_Arroyo': { + "finger_distance": 0.9, + "finger_angle": 0.3, + "finger_offset": [1.5, 1.5], + "finger_position": [1.5, 1.5, 1.5, 1.5], + "finger_velocity": [[0.08, 0.08, 0.08, 0.08], [0.06, 0.06, 0.06, 0.06], [0.15, 0.15, 0.15, 0.15]], + "positive_thres": 3, + "negative_thres": 2 + }, + + 'Stacy_Smith_no_good': { + "finger_distance": 0.9, + "finger_angle": 0.3, + "finger_offset": [1.5, 1.5], + "finger_position": [1.5, 1.5, 1.5, 1.5], + "finger_velocity": [[0.08, 0.08, 0.08, 0.08], [0.06, 0.06, 0.06, 0.06], [0.15, 0.15, 0.15, 0.15]], + "positive_thres": 3, + "negative_thres": 2 + }, + + 'Stanley_McChrystal': { + "finger_distance": 0.9, + "finger_angle": 0.3, + "finger_offset": [1.5, 1.5], + "finger_position": [1.5, 1.5, 1.5, 1.5], + "finger_velocity": [[0.08, 0.08, 0.08, 0.08], [0.06, 0.06, 0.06, 0.06], [0.15, 0.15, 0.15, 0.15]], + "positive_thres": 3, + "negative_thres": 2 + }, +} + +SPEAKERS_CONFIG = { + 'almaram': + {'median': np.array([0., -106., -194., -104., 106., 162., 137., 134., + 144., 126., 118., 104., 119., 111., 105., 101., + 125., 122., 116., 111., 133., 131., 129., 124., + 140., 139., 140., 140., -101., -87., -82., -74., + -75., -86., -83., -81., -79., -85., -85., -82., + -81., -83., -85., -83., -82., -84., -81., -80., + -79., 0., -6., 117., 126., 5., 139., 139., + 116., 131., 144., 136., 135., 134., 131., 130., + 132., 135., 132., 133., 135., 137., 135., 136., + 137., 140., 138.5, 138., 139., 109., 110., 114., + 117., 117., 114., 116., 119., 119., 117., 120., + 123., 122., 122., 127., 126., 125., 125.5, 130., + 130., 128.]), + 'mean': np.array([0., -102.831, -187.225, -105.001, 103.684, 159.654, + 126.302, 122.931, 128.711, 112.718, 106.145, 96.845, + 108.125, 101.695, 97.327, 93.299, 112.474, 108.298, + 103.437, 99.548, 117.103, 113.895, 110.956, 106.925, + 121.72, 119.542, 118.971, 118.283, -96.014, -87.539, + -80.057, -73.948, -71.795, -82.32, -79.115, -76.73, + -74.388, -82.105, -80.296, -77.741, -76.255, -82.054, + -81.601, -79.339, -77.542, -83.858, -79.748, -79.177, + -78.283, 0., -5.559, 113.522, 114.089, 4.688, + 134.756, 128.186, 105.581, 113.738, 127.147, 112.519, + 113.059, 119.44, 113.858, 114.358, 116.299, 120.788, + 116.685, 117.822, 119.544, 126.314, 122.744, 122.36, + 122.702, 131.012, 127.488, 
126.576, 125.979, 102.175, + 101.177, 100.914, 99.242, 96.896, 99.399, 99.966, + 100.655, 100.153, 102.595, 104.855, 107.204, 106.748, + 107.49, 111.118, 111.368, 110.997, 112.068, 114.87, + 115.63, 114.635]), + 'scale_factor': 1.518504709101034, + 'std': np.array([0., 15.77442357, 31.74083135, 38.76473912, 16.00981399, + 28.00046935, 40.03541927, 46.47948191, 52.37930392, 47.88657929, + 48.78288609, 48.42624263, 46.59846966, 48.26843663, 50.59365643, + 52.47084523, 48.17116694, 51.37568682, 53.37495696, 54.50430897, + 50.88302655, 54.47836245, 56.48290063, 57.29840639, 54.71323058, + 58.40441966, 60.72342348, 62.18302752, 46.9228708, 44.15867388, + 46.51470467, 49.62792859, 52.8391046, 49.06342426, 52.13875502, + 55.0459726, 57.37028374, 47.79499948, 51.69669606, 54.54852811, + 56.73739486, 48.16782208, 51.27987714, 53.97625477, 56.09597344, + 49.14352283, 51.41441914, 53.19296637, 54.5650521, 0., + 6.80856218, 25.98910379, 51.67654283, 6.51771862, 27.4053729, + 48.12067543, 44.45340751, 52.2424287, 71.22975074, 65.97734186, + 72.21212861, 66.43695056, 66.68810866, 70.34944091, 74.74944548, + 62.9067171, 64.10094988, 67.69410843, 71.30516155, 63.17366068, + 63.74567016, 65.48774236, 67.96067389, 63.03325992, 62.88430532, + 63.75716606, 64.36825739, 54.93795023, 60.33219432, 67.52062355, + 69.96675951, 72.42787574, 67.3515538, 68.93075398, 70.52320168, + 71.7720112, 64.30512402, 66.268982, 68.01139893, 69.14937813, + 62.07604933, 64.26042387, 64.83709259, 65.68755583, 59.83643853, + 61.15464905, 62.18182291, 62.71329823])}, + 'angelica': { + 'median': np.array([0., -112., -170., -51., 112., 169., 89., 65., + 49.5, 35., 37., 40., 40., 41., 41., 42., + 45., 45., 45., 45., 50., 49., 49., 48., + 55., 54., 53., 53., -39., -20., 4., 19., + 28., 4.5, 21., 28., 32., -1., 17., 23., + 25., -6., 9., 15., 18., -11., 2., 7., + 10., 0., 4., 153., 154., 0., 155., 158., + 157., 156., 157., 159., 160., 158., 170., 179., + 188., 163., 178., 190., 197., 167., 183., 194., + 200., 173., 187., 195., 200., 153., 146., 139., + 137., 134., 139., 148., 157., 163., 146., 158., + 168., 173., 154., 166., 174., 179., 163., 172., + 177., 180.]), + 'mean': np.array([0., -109.943, -170.558, -58.044, 109.827, 168.036, + 94.467, 58.97, 52.736, 45.407, 44.249, 45.665, + 51.639, 49.735, 48.165, 46.499, 56.536, 53.554, + 51.313, 48.981, 60.293, 57.029, 54.887, 53.198, + 62.166, 59.695, 58.707, 57.117, -43.247, -27.359, + -8.778, 3.673, 12.136, -8.683, 7.022, 14.388, + 18.369, -13.301, 3.559, 10.722, 14.248, -18.083, + -2.826, 4.011, 7.615, -21.951, -9.82, -4.64, + -0.922, 0., 3.994, 150.072, 139.343, -0.505, + 152.459, 153.141, 150.053, 147.183, 146.11, 148.093, + 149.845, 146.275, 155.367, 162.994, 169.658, 152.608, + 163.311, 173.255, 179.806, 158.304, 168.954, 177.697, + 184.216, 163.439, 172.377, 178.879, 183.864, 137.297, + 129.411, 121.703, 118.802, 115.731, 120.401, 126.067, + 131.436, 136.041, 127.648, 134.987, 142.396, 146.487, + 135.083, 142.758, 148.748, 152.58, 143.117, 149.36, + 153.495, 156.382]), + 'scale_factor': 1.4269190517359058, + 'std': np.array([0., 16.41102529, 31.97449978, 46.67209085, 17.08171745, + 27.65228208, 37.84749544, 64.7177804, 56.76208509, 51.92178108, + 51.05505851, 53.54873271, 53.50798706, 53.75848561, 55.10932566, + 57.11749293, 53.06833994, 51.87146695, 53.27754716, 55.13937467, + 53.40742599, 51.39184915, 51.61963029, 53.751026, 54.54957785, + 52.46150946, 52.15593112, 52.43355139, 55.59562924, 58.58003174, + 62.68405472, 66.61520901, 72.01281486, 63.33601275, 68.34514991, + 
69.40195571, 71.46270943, 61.3120086, 65.63071323, 66.73936407, + 68.31931276, 59.29323832, 62.04278946, 63.52757574, 65.27939012, + 57.65251598, 59.16748769, 60.96555093, 62.12406873, 0., + 8.30312977, 26.11135416, 53.26602436, 11.54088277, 29.07432405, + 38.48177645, 44.61889948, 46.82268159, 51.29107037, 55.75426756, + 61.06446573, 52.26399693, 56.81973522, 59.92917456, 62.63712187, + 52.8356351, 56.60500224, 59.76263026, 61.87583021, 53.49947275, + 56.04178695, 58.49736055, 60.91419657, 54.07829767, 55.98037934, + 57.79308228, 59.49702097, 56.72689654, 60.67602557, 67.52811852, + 73.58046477, 78.81067592, 71.40644368, 79.9644578, 85.45076889, + 89.87280634, 71.75058255, 79.95804419, 84.27783329, 87.16994798, + 71.19769737, 78.32231761, 82.11062353, 85.16347574, 70.63294777, + 75.82125296, 78.49320974, 80.60754354]) + }, + 'chemistry': {'median': np.array([0., -149., -196., -90., 145., 208., 235., 246., + 230.5, 216., 210., 211., 246., 243., 234., 227., + 254., 245., 231., 223., 258., 248., 235., 227., + 258., 249., 241., 234., -78., -57., -32., -10., + 7., -48., -22., -7., 1., -54., -26., -10., + -3.5, -58., -31., -17., -11., -57., -35., -25., + -19., 0., 6., 218., 261., -5., 163., 145., + 133., 120.5, 108., 104., 103., 101., 99., 102., + 105., 115., 115., 119., 121., 129., 131., 131., + 132., 142., 143., 144., 144., 264., 244., 229., + 225., 217., 226., 225., 232., 237., 241., 241., + 249., 252., 257., 258., 264., 266., 275., 278., + 280., 279.]), + 'mean': np.array([0., -143.897, -190.058, -86.903, 140.926, 217.721, + 264.637, 268.052, 258.891, 250.801, 250.009, 253.757, + 274.376, 274.907, 272.892, 271.535, 283.333, 280.266, + 273.551, 270.236, 287.871, 283.827, 275.362, 270.974, + 288.984, 285.165, 278.785, 274.5, -77.25, -57.731, + -34.007, -13.901, 0.662, -47.15, -22.265, -8.555, + -1.11, -52.623, -24.446, -11.432, -6.058, -55.034, + -29.605, -18.433, -13.892, -54.819, -35.755, -27.038, + -22.352, 0., 6.167, 206.923, 216.101, -6.115, + 152.987, 128.342, 124.441, 115.255, 103.173, 99.066, + 98.378, 89.811, 85.753, 88.11, 90.737, 99.924, + 99.372, 105.05, 107.36, 111.877, 113.49, 117.972, + 119.45, 124.973, 126.641, 128.84, 129.162, 217.45, + 200.878, 185.423, 180.274, 176.279, 180.945, 177.688, + 181.782, 185.668, 194.886, 193.136, 196.672, 198.439, + 209.824, 208.904, 210.14, 210.388, 224.81, 224.579, + 223.847, 222.846]), + 'scale_factor': 1.1010136119625171, + 'std': np.array([0., 17.54703368, 35.7581688, 69.24384154, + 16.8056099, 53.67166067, 105.93793103, 109.97522128, + 119.78379322, 134.17275207, 147.06528115, 156.34138272, + 135.87825663, 152.2457827, 163.87231107, 172.43461014, + 134.22994491, 152.57799725, 160.81889006, 165.36223361, + 132.95311339, 148.92147283, 155.09689538, 158.15342337, + 132.44675815, 145.00957822, 149.17857344, 151.5445677, + 70.96735517, 74.05220212, 79.28031881, 86.12266368, + 91.10061337, 85.69535285, 94.60962306, 97.50350237, + 98.95066397, 87.13493485, 96.15947735, 97.23624518, + 97.90133112, 87.86163465, 96.21616795, 96.25821269, + 96.51474673, 88.89298194, 95.43154078, 95.6565134, + 95.6844193, 0., 9.75823299, 41.59960422, + 85.24175502, 9.68275658, 49.8952586, 94.65167212, + 102.899779, 105.34503299, 110.68926358, 117.52109446, + 123.02262847, 116.44959974, 125.51849263, 131.95820513, + 136.88519946, 120.07525234, 129.27803223, 132.21424848, + 134.24947821, 122.7018332, 129.69374657, 130.27591956, + 130.28394951, 124.37462873, 129.23850092, 129.14637587, + 128.23497868, 92.32908263, 93.77521589, 97.21511236, + 102.35374406, 107.13176541, 
95.68562052, 103.19170827, + 108.58317768, 113.0156174, 96.96912397, 104.87073712, + 109.90668049, 113.97662163, 98.84680584, 106.12451547, + 109.89005597, 112.6861813, 101.29443173, 106.64136045, + 109.06442862, 110.48723132])}, + 'conan': {'mean': np.array([0., -109.583, -156.881, -120.313, 109.631, 152.443, + 108.144, 101.458, 92.114, 81.513, 76.847, 74.993, + 91.03, 83.972, 81.598, 81.622, 95.874, 87.808, + 85.537, 85.997, 99.22, 92.344, 90.057, 89.867, + 102.278, 97.839, 95.828, 94.729, -112.329, -107.723, + -101.084, -98.403, -97.372, -113.236, -106.546, -100.633, + -97.867, -115.78, -107.062, -100.376, -97.709, -115.577, + -107.58, -101.694, -98.566, -113.886, -107.761, -103.555, + -100.291, 0., -3.122, 154.074, 210.401, 3.051, + 167.82, 232.961, 237.28, 231.731, 227.719, 228.673, + 230.681, 232.244, 237.448, 240.62, 242.516, 240.195, + 246.415, 249.652, 249.489, 247.002, 253.542, 255.679, + 254.841, 254.024, 258.919, 260.762, 260.172, 216.536, + 210.82, 205.289, 204.407, 205.708, 212.984, 215.676, + 216.734, 217.611, 222.036, 225.523, 225.602, 225.524, + 230.145, 233.169, 232.708, 231.662, 236.648, 239.776, + 239.635, 238.414]), + 'scale_factor': 1.4305381955532037, + 'std': np.array([0., 10.59231377, 32.5747577, 87.34243545, + 11.07947828, 28.23531036, 81.80955485, 94.92227471, + 96.95924404, 102.26105726, 108.35750824, 113.98373985, + 105.4720489, 115.84258809, 120.26970689, 122.74060907, + 105.35736388, 116.27625353, 119.70779687, 121.70631451, + 104.85591829, 114.45731809, 117.19618488, 118.52391873, + 104.49567798, 111.82849851, 114.05433975, 114.97168155, + 99.95218236, 100.40852688, 105.43183079, 111.24512839, + 116.71825742, 109.49891462, 119.41624632, 124.78480801, + 128.55843539, 111.29522721, 121.82060645, 127.12367452, + 129.74770256, 112.66641945, 122.448853, 126.91806161, + 128.77232484, 114.58103248, 122.30987646, 125.25131925, + 126.38526939, 0., 6.75537682, 32.26199814, + 97.00566065, 6.26788633, 24.78805357, 94.33954356, + 105.77244254, 112.9200719, 123.72302146, 132.94217567, + 139.79612741, 125.72067636, 134.83989505, 139.963108, + 143.67274531, 124.0620771, 133.07969332, 137.87443888, + 140.63537208, 122.2001882, 130.13695953, 134.74150051, + 137.58544152, 120.85215523, 126.25547291, 130.08463151, + 131.9334545, 105.97377366, 113.70617222, 125.14332375, + 132.79858189, 140.62497195, 128.00962364, 137.0962473, + 141.59214401, 144.98927436, 126.15767398, 134.63195561, + 138.48896561, 140.98466379, 124.44862384, 131.4354459, + 134.99263956, 137.01364077, 122.28814373, 127.69337424, + 130.25399716, 131.22054185]), + 'median': np.array([0., -110., -153., -106., 110., 149., 106., 97., + 83., 74., 71., 70., 85., 82., 81., 81., + 90., 89., 87., 86., 93., 92., 91., 90., + 95., 93., 94., 92., -101., -91., -78., -74., + -74., -90., -80., -73., -69., -93., -80., -72., + -69., -93., -80., -72., -69.5, -92., -81., -76., + -73., 0., -4., 159., 207., 4., 171., 230., + 235., 225., 213., 212., 216., 219., 226., 233., + 238., 231., 241., 247., 248., 242., 252., 256., + 256., 252., 260., 262., 262., 215., 202., 191., + 189., 193., 207., 210., 209., 210., 220., 222., + 220., 220., 230., 232., 229., 228., 238., 240., + 238., 236.]), + }, + 'ellen': {'median': np.array([0., -129., -172., -147., 129., 171., 141., 133., + 128., 119., 107.5, 101., 124., 121., 118., 118., + 125., 119., 114., 114., 123., 116., 113., 112., + 121., 113., 111., 109., -143., -137., -130., -120., + -116., -139., -137., -135., -135., -142., -138., -134., + -133., -141., -136., -132., -130., -139., -134., 
-131.5, + -129., 0., -2., 198., 229., 0., 199., 253., + 261., 251., 243., 238., 235., 263., 266., 265., + 265., 273., 277., 277., 274., 280., 284., 283., + 282., 285., 287., 288., 286., 228., 222., 217., + 218., 219., 230., 236., 238., 240., 242., 249., + 251., 250., 251., 257., 257., 255., 257., 262., + 262., 260.]), + 'mean': np.array([0., -118.909, -164.154, -147.246, 118.925, 160.707, + 133.202, 126.031, 122.365, 116.286, 108.719, 106.006, + 118.932, 113.969, 113.305, 115.011, 118.991, 112.665, + 109.438, 109.385, 117.644, 111.169, 108.96, 107.772, + 116.547, 110.923, 109.41, 107.968, -144.238, -140.508, + -135.596, -132.849, -131.928, -145.429, -143.271, -141.212, + -141.904, -146.314, -142.76, -138.772, -137.964, -145.332, + -140.897, -137.975, -136.323, -143.97, -139.602, -138.326, + -136.509, 0., -1.674, 186.215, 222.679, 1.261, + 186.058, 252.238, 256.284, 254.343, 250.461, 249.989, + 248.952, 262.385, 264.888, 265.272, 265.825, 268.538, + 271.047, 272.562, 272.724, 273.782, 276.197, 277.758, + 278.16, 276.863, 279.468, 282.137, 281.792, 221.492, + 219.701, 217.669, 217.751, 219.06, 225.513, 228.427, + 229.216, 229.918, 232.378, 236.486, 238.776, 239.216, + 238.599, 242.803, 244.281, 244.736, 242.657, 246.497, + 248.185, 248.794]), + 'scale_factor': 1.3185415037379011, + 'std': np.array([0., 25.8187668, 41.89625621, 77.94319396, + 26.03765302, 36.60665446, 60.07478003, 68.73073577, + 71.76289971, 78.7986688, 85.17248405, 91.8425063, + 81.57343548, 90.51250764, 96.94054866, 103.783818, + 80.62698629, 90.21175519, 96.14923898, 101.66000578, + 80.05647547, 88.31894723, 94.51180032, 98.51348139, + 80.68749464, 88.48727067, 91.83456811, 94.89260759, + 85.40677582, 91.29359198, 100.86450706, 110.52797021, + 117.97610273, 102.74902899, 113.07612285, 119.03282344, + 124.60098228, 101.47065292, 111.75965462, 117.53972101, + 121.85632812, 99.92104771, 109.12218102, 114.81892864, + 118.45598622, 99.84089893, 107.37479032, 112.15367905, + 113.79890122, 0., 5.87807145, 42.05188194, + 94.98783058, 5.69902439, 41.39882409, 85.89787748, + 92.3838262, 100.51717938, 109.75256935, 116.35522712, + 122.94281474, 110.31635769, 115.28223391, 118.60499153, + 121.26193292, 105.71004, 110.80258477, 115.78894661, + 118.8850614, 103.46256558, 108.43064231, 113.94664293, + 117.36026755, 102.35001823, 107.34869806, 111.92460065, + 114.05449897, 103.00641697, 112.17914066, 121.99203023, + 127.82240414, 133.5572177, 124.74606138, 130.2611403, + 133.63072006, 136.23373032, 120.91488377, 127.17007433, + 131.01745618, 132.5830432, 119.50055313, 125.03061302, + 127.97185643, 129.91735952, 117.63924239, 121.72976625, + 124.69851954, 126.28790743])}, + 'jon': {'mean': np.array([0., -153.331, -220.516, -210.796, 160.263, 215.725, + 113.459, 106.156, 81.586, 56.863, 41.095, 30.188, + 70.704, 50.813, 37.224, 28.324, 78.826, 54.748, + 40.502, 32.23, 84.64, 62.526, 49.402, 42.049, + 87.848, 71.01, 61.826, 55.67, -205.849, -185.743, + -168.707, -161.135, -152.156, -190.978, -188.077, -185.31, + -183.466, -206.802, -204.832, -201.539, -199.346, -218.691, + -218.876, -215.868, -212.868, -227.754, -229.963, -229.562, + -227.931, 0., -2.292, 190.993, 231.52, -1.083, + 220.65, 257.155, 261.606, 251.047, 243.471, 243.151, + 243.737, 237.272, 240.32, 248.198, 255.309, 249.663, + 256.813, 265.245, 269.657, 262.648, 268.498, 275.582, + 278.131, 275.337, 280.445, 284.139, 285.841, 241.15, + 242.714, 245.433, 247.933, 247.489, 234.907, 242.81, + 254.631, 265.567, 239.662, 249.287, 264.716, 275.327, + 246.93, 256.517, 270.548, 280.467, 
255.458, 263.393, + 273.444, 281.315]), + 'median': np.array([0., -155., -220., -208., 162., 214., 102., 94., + 66., 37., 19., 6., 53., 32., 16.5, 6., + 64., 38., 22., 13., 71., 48., 33., 25., + 76., 59., 49., 41., -201., -177., -157., -147., + -135., -180., -177., -173., -171., -200., -198., -193., + -191., -214.5, -215., -212., -208., -227., -229., -229., + -226., 0., -4., 192., 253., 0., 231., 283., + 290., 281., 277., 279., 283., 269., 276., 289., + 298., 282., 295., 309., 314., 295., 307., 318., + 321., 309., 318., 324., 327., 268., 273., 282., + 290., 290., 269., 283., 300., 315., 274., 289., + 308., 322., 281., 295., 312., 324., 289., 300., + 313., 322.]), + 'scale_factor': 1.0, + 'std': np.array([0., 13.60674241, 33.3032993, 58.68593003, + 14.80816771, 41.0566849, 86.83690643, 91.52707613, + 96.46133217, 103.81552018, 111.29798729, 116.84988941, + 108.953423, 118.5386858, 121.7592864, 123.60705896, + 109.22557267, 118.82313115, 122.45853174, 125.15311862, + 109.25032906, 117.86855952, 121.46853253, 123.86147342, + 109.55747759, 116.71509714, 119.36929976, 120.8869021, + 69.0435674, 73.14218312, 81.05269367, 88.65717554, + 97.30981278, 84.00480651, 92.37692932, 96.86237608, + 100.46773036, 82.47765028, 91.27185643, 96.26633097, + 99.31366615, 80.67351188, 89.02701064, 93.30791272, + 96.34672063, 80.04087383, 86.67089264, 90.58648992, + 93.14206482, 0., 10.88231299, 30.72401261, + 64.20069782, 11.80025894, 38.96731323, 67.39309293, + 73.62073596, 79.57721276, 89.74364133, 98.04464391, + 105.0374687, 90.90715052, 99.81535754, 105.95001084, + 110.26701011, 91.25035579, 100.25840629, 105.86755393, + 108.97285603, 91.38375182, 99.14092997, 103.26581853, + 106.88325331, 91.70622351, 97.17438436, 99.73720308, + 102.19299251, 75.27206321, 82.64365798, 93.43973197, + 101.8475258, 107.69945162, 95.75379027, 106.19971704, + 113.93437953, 121.08109477, 94.56100547, 104.48614564, + 110.77791, 116.16508973, 94.30819212, 101.95049637, + 106.9177146, 111.37067348, 93.46864841, 99.52853134, + 103.17545669, 105.90626882])}, + 'oliver': {'mean': np.array([0., -163.658, -211.057, -134.649, 164.739, 209.073, + 143.511, 127.913, 116.269, 103.244, 96.397, 93.487, + 127.25, 111.415, 99.029, 90.083, 132.054, 113.831, + 99.465, 91.78, 130.525, 111.719, 97.237, 89.394, + 125.449, 109.247, 100.154, 93.562, -117.402, -100.158, + -82.374, -73.054, -67.696, -111.641, -100.162, -88.474, + -79.601, -119.885, -105.883, -91.469, -83.153, -122.363, + -108.076, -95.898, -88.328, -121.041, -110.486, -103.011, + -97.96, 0., 1.885, 226.397, 229.072, -4.987, + 213.13, 243.52, 255.207, 231.348, 204.754, 188.757, + 177.801, 206.763, 198.08, 197.119, 196.928, 223.176, + 215.778, 214.844, 213.122, 239.312, 233.451, 230.949, + 228.755, 253.464, 248.937, 246.076, 243.138, 232.561, + 211.604, 184.101, 165.544, 151.386, 178.313, 165.155, + 163.223, 162.372, 192.993, 181.789, 181.059, 180.34, + 208.741, 200.467, 198.325, 196.636, 224.591, 218.906, + 215.748, 213.304]), + 'scale_factor': 0.9549234615419752, + 'std': np.array([0., 11.99879311, 29.82817043, 54.03489427, + 12.11713163, 35.70626935, 100.127588, 114.55109529, + 117.61694027, 122.42752331, 129.34672547, 135.35726737, + 120.82736238, 130.10660542, 136.48968517, 139.7453903, + 122.16887936, 132.40670844, 138.38685911, 141.79375727, + 123.35014947, 133.45671223, 139.08317954, 142.11474506, + 126.82329202, 133.81198747, 138.99624558, 141.54416327, + 61.03361693, 62.88763818, 68.25358689, 73.29697868, + 78.72461867, 68.02637811, 76.81368209, 81.46072258, + 84.88038524, 
67.26606704, 77.41239766, 82.52517821, + 85.17540485, 67.84581955, 77.28375136, 81.87896919, + 84.03993346, 69.44094843, 77.42084864, 80.58891288, + 82.31571174, 0., 9.47827912, 29.89226306, + 65.29199657, 9.53880658, 33.56934763, 59.26234555, + 71.23782809, 69.40388243, 72.38679081, 78.91335724, + 86.99427222, 74.79921678, 82.52043141, 88.62611827, + 93.39188838, 77.27343026, 84.96602095, 90.18950972, + 93.68292863, 79.41305092, 85.97462183, 89.45524243, + 92.88002463, 80.98173068, 85.93479523, 88.12695515, + 89.9635646, 74.11004169, 73.49017066, 76.71515365, + 82.82322177, 89.53648979, 78.33528599, 87.32146915, + 95.1113835, 101.48254833, 81.44670006, 91.03851097, + 96.9321181, 101.12747599, 84.49668585, 93.10840408, + 97.03436183, 99.7947569, 87.07782564, 93.60984544, + 96.28646061, 97.48070365]), + 'median': np.array([0., -165., -211., -128., 167., 205., 118., 96.5, + 82., 66., 56., 50., 94., 77., 62., 51., + 99., 82., 63., 53., 99., 77., 59., 50., + 92., 72., 60., 53., -110., -93., -74., -63., + -56., -104., -94., -81., -71., -112., -99., -83., + -73., -114., -101., -86., -77., -112., -101., -93., + -86., 0., 2., 227., 257., -4., 219., 267., + 279., 253., 220., 200., 182., 227., 219., 221., + 222., 247., 241., 242., 242., 267., 262., 260., + 258., 284., 280., 277., 273., 264., 241., 210., + 188., 167., 211., 196., 192., 190., 226., 215., + 214., 212., 243., 235., 235., 232., 260., 255., + 253., 251.]), + }, + 'median': np.array([0., -165., -211., -128., 167., 205., 118., 96.5, + 82., 66., 56., 50., 94., 77., 62., 51., + 99., 82., 63., 53., 99., 77., 59., 50., + 92., 72., 60., 53., -110., -93., -74., -63., + -56., -104., -94., -81., -71., -112., -99., -83., + -73., -114., -101., -86., -77., -112., -101., -93., + -86., 0., 2., 227., 257., -4., 219., 267., + 279., 253., 220., 200., 182., 227., 219., 221., + 222., 247., 241., 242., 242., 267., 262., 260., + 258., 284., 280., 277., 273., 264., 241., 210., + 188., 167., 211., 196., 192., 190., 226., 215., + 214., 212., 243., 235., 235., 232., 260., 255., + 253., 251.]), + 'rock': {'mean': np.array([0., -50.691, -71.908, -59.31, 52.451, 78.76, 39.172, + 35.689, 30.193, 21.265, 14.115, 9.367, 21.133, 11.618, + 6.135, 2.322, 20.812, 10.702, 4.949, 1.614, 20.973, + 11.516, 6.355, 3.296, 20.881, 12.962, 9.191, 6.544, + -56.623, -53.151, -46.877, -42.133, -38.616, -51.007, -45.297, + -41.204, -38.231, -51.745, -45.244, -40.455, -37.947, -51.901, + -45.412, -40.849, -38.384, -51.166, -45.603, -42.475, -40.21, + 0., 2.919, 71.638, 62.753, -2.236, 68.846, 61.088, + 62.271, 55.003, 47.461, 44.187, 41.541, 47.717, 45.33, + 44.813, 44.858, 53.181, 51.377, 51.07, 50.835, 58.485, + 57.221, 56.71, 56.121, 63.687, 62.794, 62.234, 61.799, + 62.994, 56.582, 49.842, 46.872, 44.653, 49.173, 46.988, + 47.094, 47.456, 54.367, 52.627, 52.811, 52.692, 59.522, + 58.338, 58.022, 57.695, 64.378, 63.897, 63.478, 62.855]), + 'median': np.array([0., -53., -74., -61., 54., 82., 39., 36., 29., + 18., 9., 3., 19., 9., 3., -1., 19., 8., + 2., -2., 19., 9., 3., 0., 19., 10., 6., + 3., -60., -54., -47., -41., -36., -52., -45., -39.5, + -36., -53., -45., -39., -35., -54., -44., -39., -36., + -52., -45., -41., -38., 0., 3., 75., 67., -2., + 71., 63., 65., 57., 49., 46., 44., 50., 49., + 50., 50., 57., 56., 57., 57., 63., 63., 63., + 62., 69., 69., 68., 68., 67., 60., 52., 49., + 47., 52., 50., 51., 52., 58., 57., 58., 58., + 64., 64., 64., 64., 70., 70., 69., 69.]), + 'scale_factor': 3.0404103081189042, + 'std': np.array([0., 10.4203416, 17.22636166, 24.77684201, 11.10331478, + 
17.75664383, 18.53861958, 20.16805095, 20.55582037, 23.09192012, + 25.92936125, 28.61632945, 23.87838585, 26.28121146, 27.0651949, + 28.08466336, 23.62597418, 25.974626, 26.90238649, 28.14425348, + 22.98891626, 25.03772641, 26.18272283, 27.28751333, 22.26856167, + 23.93914276, 24.86223077, 25.93993184, 29.16729112, 28.9929681, + 30.65772123, 32.47656557, 34.01532807, 33.28412461, 35.77885955, + 36.57866023, 37.38547364, 33.74483627, 36.30537789, 36.96885142, + 37.77928256, 34.01107465, 36.46546662, 37.06834497, 37.90272476, + 34.39137165, 36.44109481, 36.96086275, 37.45362332, 0., + 2.89973085, 18.24277819, 20.42860717, 2.76048981, 13.92466459, + 17.1719031, 19.71708799, 19.53716947, 21.61500588, 24.15897413, + 26.41163227, 24.7449977, 27.6888985, 29.30245776, 30.29448524, + 25.62081652, 28.73668163, 30.19561392, 31.28564807, 25.91670841, + 28.75941861, 30.06825402, 31.11173989, 26.03480422, 28.16124223, + 29.02597533, 29.7304322, 23.28072087, 22.67441898, 24.5621464, + 26.42123419, 28.12181699, 26.97838155, 29.05460129, 30.11018373, + 30.98118887, 27.92701758, 30.10780415, 30.95133727, 31.63882324, + 28.46049044, 30.59816589, 30.90853468, 31.19961498, 28.92246732, + 30.4676614, 30.74221066, 30.69778453])}, + 'seth': {'mean': np.array([0.00000e+00, -1.56603e+02, -1.92734e+02, -1.32233e+02, + 1.60156e+02, 1.93264e+02, 7.84210e+01, 6.13590e+01, + 3.63920e+01, 1.03240e+01, 3.24000e+00, 5.41000e-01, + 2.59350e+01, 7.05000e+00, -5.32800e+00, -1.25190e+01, + 3.17200e+01, 8.77700e+00, -6.74700e+00, -1.21180e+01, + 3.12780e+01, 1.12180e+01, -3.30400e+00, -8.38000e+00, + 2.93330e+01, 1.41880e+01, 3.79600e+00, -4.10200e+00, + -1.22910e+02, -1.04130e+02, -8.88500e+01, -7.73330e+01, + -6.66150e+01, -1.06764e+02, -9.15200e+01, -7.91270e+01, + -7.09780e+01, -1.12913e+02, -9.24190e+01, -7.87410e+01, + -7.28720e+01, -1.11232e+02, -9.46040e+01, -8.03730e+01, + -7.46850e+01, -1.05766e+02, -9.38100e+01, -8.45570e+01, + -7.68080e+01, 0.00000e+00, -1.52000e-01, 2.18895e+02, + 2.68892e+02, 3.11000e-01, 2.37138e+02, 2.60438e+02, + 2.62680e+02, 2.46628e+02, 2.28206e+02, 2.22251e+02, + 2.19554e+02, 2.28401e+02, 2.20632e+02, 2.23361e+02, + 2.28428e+02, 2.42971e+02, 2.34319e+02, 2.39472e+02, + 2.41920e+02, 2.55531e+02, 2.52430e+02, 2.51910e+02, + 2.51151e+02, 2.67728e+02, 2.67377e+02, 2.66409e+02, + 2.64099e+02, 2.76742e+02, 2.61222e+02, 2.46399e+02, + 2.44755e+02, 2.41459e+02, 2.54082e+02, 2.51658e+02, + 2.56695e+02, 2.62400e+02, 2.68415e+02, 2.68020e+02, + 2.72175e+02, 2.74369e+02, 2.80791e+02, 2.83345e+02, + 2.84913e+02, 2.83823e+02, 2.92152e+02, 2.94754e+02, + 2.95650e+02, 2.94466e+02]), + 'median': np.array([0., -160., -188., -90., 164., 188., 74., 61., + 30., -5., -13., -15., 14., -17., -37., -47., + 22., -15., -39., -48., 21., -12., -35., -45., + 15., -9., -26., -40., -82., -55., -35., -19., + -1., -50., -19., 3., 17., -54., -12.5, 12., + 23., -50., -14., 12., 23., -38., -14., 6., + 20., 0., 0., 231., 280.5, 0., 243.5, 284., + 288., 270., 243., 237., 237., 250., 236., 243., + 253., 271., 260., 269., 273., 286., 285., 286., + 285., 303., 306., 305., 302., 287., 268., 244., + 241., 234., 260., 251., 258., 266., 279., 277., + 283., 284., 295., 299., 301., 297., 309., 314., + 314., 312.]), + 'scale_factor': 0.9900081765632547, + 'std': np.array([0., 18.55767741, 35.88235282, 101.25383307, + 19.41946611, 38.02917701, 54.45006666, 61.64462766, + 64.83266411, 72.31580065, 77.08544869, 83.25050342, + 74.10669858, 79.17970384, 85.60273603, 88.84267915, + 71.75098327, 79.86841222, 86.6311664, 90.07606828, + 
73.40176235, 79.49821681, 84.53137633, 89.01422134, + 72.98420453, 77.71971858, 83.04868683, 87.2696946, + 111.3822423, 118.55779645, 126.86364925, 137.85477181, + 149.29311027, 136.38734657, 153.90880287, 164.70497525, + 173.08522616, 139.26315173, 160.31123304, 173.69513499, + 183.07759452, 143.54942764, 162.06102303, 175.3751404, + 183.9359502, 148.61482175, 163.27092178, 173.38245803, + 181.60665499, 0., 9.34959336, 37.72078969, + 49.82852934, 9.37562153, 35.3828342, 61.75611837, + 69.91869278, 69.9318641, 72.31696595, 75.38982689, + 78.29895966, 76.43157855, 80.22123519, 83.73378457, + 87.34744882, 76.80201924, 79.60171631, 84.10195727, + 87.62829223, 78.27895655, 82.02017496, 84.37008889, + 86.20353936, 80.41539664, 84.47919786, 85.72448728, + 86.26808911, 54.1489006, 55.01033281, 59.79178705, + 62.95996327, 68.53929033, 60.30863351, 64.76271332, + 66.97766773, 69.2184224, 60.48514508, 63.85550564, + 65.756052, 68.09354477, 61.19543544, 64.2909634, + 65.19778701, 67.21045805, 62.52371467, 65.29180258, + 66.37902907, 65.94090418])}, + 'shelly': {'median': np.array([0., -44., -60., -53., 44., 65., 40., 34., 33., 32., 32., + 32., 31., 28., 26., 25., 31., 28., 26., 25., 31., 28., + 26., 25., 30., 28., 27., 26., -49., -49., -52., -55., -56., + -55., -57., -57., -58., -56., -57., -57., -57., -56., -56., -57., + -56., -55., -56., -56., -56., 0., 0., 70., 99., 0., 69., + 84., 86., 84., 81., 79., 77., 85., 85., 84., 83., 88., + 88., 87., 86., 91., 91., 90., 89., 92., 92., 91., 91., + 104., 102., 100., 99., 99., 107., 109., 110., 111., 111., 114., + 115., 115., 115., 118., 119., 119., 117., 120., 121., 121.]), + 'mean': np.array([0.00000e+00, -4.39670e+01, -6.16120e+01, -5.66750e+01, + 4.38510e+01, 6.52460e+01, 4.23140e+01, 3.73650e+01, + 3.57040e+01, 3.45580e+01, 3.44570e+01, 3.45120e+01, + 3.52910e+01, 3.31350e+01, 3.19850e+01, 3.09850e+01, + 3.51300e+01, 3.28280e+01, 3.14670e+01, 3.08340e+01, + 3.51450e+01, 3.27940e+01, 3.16110e+01, 3.05940e+01, + 3.47090e+01, 3.29040e+01, 3.22040e+01, 3.11530e+01, + -5.37470e+01, -5.32070e+01, -5.42620e+01, -5.62220e+01, + -5.78420e+01, -5.81480e+01, -5.93390e+01, -5.98870e+01, + -6.00440e+01, -5.89260e+01, -5.98730e+01, -5.95250e+01, + -5.97740e+01, -5.86410e+01, -5.93170e+01, -5.91710e+01, + -5.94890e+01, -5.80360e+01, -5.85560e+01, -5.88470e+01, + -5.87110e+01, 0.00000e+00, -9.70000e-02, 6.69060e+01, + 9.19000e+01, -4.16000e-01, 6.65610e+01, 8.19670e+01, + 8.38610e+01, 8.18550e+01, 7.90090e+01, 7.72360e+01, + 7.61760e+01, 8.11380e+01, 8.04890e+01, 7.99180e+01, + 7.96400e+01, 8.42210e+01, 8.41600e+01, 8.36870e+01, + 8.31360e+01, 8.67250e+01, 8.67590e+01, 8.62470e+01, + 8.56190e+01, 8.85240e+01, 8.86890e+01, 8.82870e+01, + 8.75840e+01, 9.63500e+01, 9.51850e+01, 9.30020e+01, + 9.14720e+01, 9.06760e+01, 9.54290e+01, 9.63560e+01, + 9.65460e+01, 9.66780e+01, 9.88890e+01, 9.99080e+01, + 1.00404e+02, 1.00404e+02, 1.01832e+02, 1.02755e+02, + 1.03448e+02, 1.03503e+02, 1.04009e+02, 1.05167e+02, + 1.05725e+02, 1.06154e+02]), + 'scale_factor': 3.570953563050855, + 'std': np.array([0., 5.97075464, 18.08920827, 33.06550128, 6.05266875, + 18.51673524, 32.79398427, 37.3746408, 38.17355608, 41.0028857, + 43.89186885, 46.5736176, 42.3892005, 46.75329694, 48.91665131, + 50.78065355, 42.70645267, 46.58178202, 48.63481172, 50.08577087, + 42.50759903, 45.92044821, 47.71441794, 49.30368307, 42.41971616, + 45.19445524, 46.42829293, 47.59075111, 38.04543325, 38.97140171, + 40.60807008, 42.62166956, 45.15082542, 42.1350934, 44.92540572, + 47.14573396, 49.22062641, 42.10518405, 
45.64207347, 47.96439695, + 49.88419513, 42.26076335, 45.48921313, 47.56071655, 49.44677825, + 42.74354576, 45.27401975, 46.8043544, 48.27524706, 0., + 4.71376612, 19.68423643, 46.76840814, 4.8071763, 17.32877027, + 37.98475893, 44.62642355, 44.27660754, 46.27339321, 49.00555381, + 51.54084811, 48.50106139, 51.8787999, 54.01876781, 55.82720126, + 49.17186349, 53.1482869, 55.37935564, 56.9398411, 49.60537647, + 53.15278844, 55.01163505, 56.67173757, 49.79045515, 52.37627592, + 53.64364483, 54.5685527, 53.91743225, 54.13352727, 55.90663642, + 58.86213737, 61.06556332, 58.86913418, 62.15002224, 64.03989291, + 65.85472129, 59.38530693, 62.81211297, 64.88760116, 66.08761445, + 59.60905784, 62.46751936, 64.10454973, 65.54253574, 59.33884831, + 61.90099443, 63.20263741, 64.26842369])}} + + +SPEAKERS_CONFIG['Lisa_Feldman_Barrett'] = { + 'median': np.ones(98) * 112, + 'scale_factor': 1, + 'mean': np.array([ 0. , -1.19841738, -1.58047268, -1.4045821 , 1.2758864 , + 1.77768676, 1.65703035, 1.61845832, 1.59244072, 1.48879356, + 1.40160497, 1.36102816, 1.53588248, 1.36215933, 1.26510949, + 1.19200311, 1.45941759, 1.26593316, 1.1738424 , 1.10540102, + 1.3722366 , 1.20770126, 1.12864624, 1.06831708, 1.30632046, + 1.17984234, 1.11749213, 1.06460759, -1.37441402, -1.35158357, + -1.29301242, -1.22775457, -1.16972311, -1.33576587, -1.23453882, + -1.14965901, -1.08120032, -1.31386467, -1.18257884, -1.08857249, + -1.01432993, -1.27846248, -1.15234329, -1.06544065, -0.99501345, + -1.2416203 , -1.13701188, -1.06792966, -1.00916048, 0. , + 0.56662626, 2.48479235, 2.1363345 , 0.45417501, 2.35629813, + 2.38806224, 2.36420379, 2.18185715, 1.96066104, 1.84163623, + 1.77816598, 2.11228409, 1.9678031 , 1.89541172, 1.83834353, + 2.18465058, 2.0487781 , 1.98426972, 1.92258174, 2.23186942, + 2.12684667, 2.07277724, 2.01655666, 2.28049702, 2.2027903 , + 2.15986948, 2.11150698, 2.08887799, 1.88739571, 1.70462141, + 1.59624438, 1.51880991, 1.76258563, 1.68480154, 1.6382096 , + 1.59391717, 1.88683326, 1.79962303, 1.74649191, 1.69354692, + 1.98660209, 1.9059621 , 1.84958903, 1.80192587, 2.07046951, + 2.00581994, 1.96129077, 1.91901187]), + 'std': np.array([1.00000000e-09, 1.76916032e-01, 3.55399528e-01, 7.72094753e-01, + 1.84809040e-01, 3.46885513e-01, 6.94195932e-01, 7.80062801e-01, + 8.77986782e-01, 1.09597705e+00, 1.26726032e+00, 1.35675574e+00, + 1.15670325e+00, 1.39478340e+00, 1.48536362e+00, 1.54313323e+00, + 1.20312300e+00, 1.41754663e+00, 1.47550790e+00, 1.51397373e+00, + 1.22895065e+00, 1.39268560e+00, 1.42610007e+00, 1.45145684e+00, + 1.22330584e+00, 1.34219012e+00, 1.36608911e+00, 1.38689408e+00, + 8.48972445e-01, 9.16183586e-01, 1.01957184e+00, 1.12775862e+00, + 1.22240615e+00, 1.13235026e+00, 1.26911958e+00, 1.32638650e+00, + 1.36662817e+00, 1.14346996e+00, 1.26913627e+00, 1.31328970e+00, + 1.34089795e+00, 1.12620388e+00, 1.23142265e+00, 1.26397872e+00, + 1.28137205e+00, 1.09756192e+00, 1.17724380e+00, 1.20309554e+00, + 1.22028958e+00, 1.00000000e-09, 9.04773899e-02, 3.07732335e-01, + 5.37526342e-01, 1.09008217e-01, 3.10256394e-01, 6.42911520e-01, + 7.15965962e-01, 7.58809378e-01, 8.56852460e-01, 1.00070470e+00, + 1.06392498e+00, 8.97523961e-01, 1.10467392e+00, 1.18871236e+00, + 1.23741103e+00, 9.33567811e-01, 1.14429458e+00, 1.19862626e+00, + 1.23185743e+00, 9.70476463e-01, 1.14796893e+00, 1.17120094e+00, + 1.18865253e+00, 9.85530394e-01, 1.11727692e+00, 1.13773721e+00, + 1.16315194e+00, 6.26625527e-01, 6.55486595e-01, 7.19463952e-01, + 7.85708784e-01, 8.41750680e-01, 8.19581326e-01, 8.93514819e-01, + 
9.27416670e-01, 9.55377400e-01, 8.37890312e-01, 9.16777747e-01, + 9.39984132e-01, 9.58576726e-01, 8.35891695e-01, 9.03037061e-01, + 9.17925737e-01, 9.26210664e-01, 8.21419854e-01, 8.70511144e-01, + 8.80774458e-01, 8.87293754e-01]) +} + +SPEAKERS_CONFIG['Amel_Karboul'] = { + 'median': np.ones(98) * 112, + 'scale_factor': 1, + 'mean': np.array([ 0. , -1.31694646, -1.7182911 , -1.4229387 , 1.43300073, + 1.90203809, 1.02702671, 0.86147811, 0.7523877 , 0.59240631, + 0.43787492, 0.30771354, 0.55607061, 0.33617568, 0.21176807, + 0.1035814 , 0.53735101, 0.28595784, 0.16503721, 0.07199452, + 0.51410739, 0.2780363 , 0.17586396, 0.09493798, 0.49115767, + 0.30839241, 0.22946503, 0.15296569, -1.44171535, -1.38051611, + -1.2706162 , -1.17360722, -1.10071166, -1.37784478, -1.23175278, + -1.12603394, -1.04466992, -1.3976284 , -1.21930714, -1.1082398 , + -1.02286535, -1.39882507, -1.22857474, -1.12760953, -1.04923929, + -1.38873537, -1.25572297, -1.17423253, -1.11249841, 0. , + 0.72992549, 3.20478178, 2.74745657, 0.60989877, 3.05667283, + 2.81257127, 2.68400513, 2.46016952, 2.30887215, 2.25025663, + 2.18478005, 2.40968863, 2.39325012, 2.3859585 , 2.37106601, + 2.55379422, 2.54426606, 2.52255946, 2.49455527, 2.65988151, + 2.64391092, 2.61598359, 2.57995377, 2.73033623, 2.71651668, + 2.693263 , 2.65664704, 2.57984597, 2.36010878, 2.2032952 , + 2.13959683, 2.09660923, 2.25189186, 2.2422468 , 2.25385632, + 2.252677 , 2.40281431, 2.39491628, 2.38881862, 2.37257373, + 2.52759149, 2.51890148, 2.49875008, 2.4762942 , 2.62450517, + 2.60768974, 2.59020701, 2.55683496]), + 'std': np.array([1.00000000e-09, 3.94340201e-01, 7.46216428e-01, 1.25409609e+00, + 4.48989120e-01, 6.33724388e-01, 1.03087481e+00, 1.20060967e+00, + 1.27201328e+00, 1.36162935e+00, 1.44756678e+00, 1.53149847e+00, + 1.48976371e+00, 1.59383183e+00, 1.62068916e+00, 1.63951742e+00, + 1.50054577e+00, 1.58732326e+00, 1.59727258e+00, 1.60040304e+00, + 1.48661276e+00, 1.55780683e+00, 1.55942161e+00, 1.55721045e+00, + 1.46524075e+00, 1.51474948e+00, 1.51373124e+00, 1.51820939e+00, + 1.33664619e+00, 1.39896886e+00, 1.50359920e+00, 1.61010120e+00, + 1.71089947e+00, 1.66896808e+00, 1.80657982e+00, 1.83613596e+00, + 1.84787867e+00, 1.68731414e+00, 1.79727068e+00, 1.80677831e+00, + 1.79939450e+00, 1.66458069e+00, 1.74878824e+00, 1.74475312e+00, + 1.73101650e+00, 1.62001011e+00, 1.68301239e+00, 1.68016133e+00, + 1.67137502e+00, 1.00000000e-09, 1.85029978e-01, 6.19248000e-01, + 7.03050558e-01, 1.75646450e-01, 5.44235904e-01, 6.50877893e-01, + 1.09117114e+00, 1.11728498e+00, 1.14748641e+00, 1.18866237e+00, + 1.23594361e+00, 1.18641947e+00, 1.26764022e+00, 1.32525317e+00, + 1.38685002e+00, 1.26395150e+00, 1.35033661e+00, 1.39207535e+00, + 1.42608879e+00, 1.34876036e+00, 1.43823018e+00, 1.45561989e+00, + 1.47191535e+00, 1.43581331e+00, 1.49487696e+00, 1.49660077e+00, + 1.52114195e+00, 1.11707360e+00, 1.12800405e+00, 1.15627454e+00, + 1.19445103e+00, 1.26172596e+00, 1.21221330e+00, 1.29930050e+00, + 1.33494815e+00, 1.36807089e+00, 1.27666516e+00, 1.36428756e+00, + 1.38710129e+00, 1.40355525e+00, 1.32994896e+00, 1.41117447e+00, + 1.42606820e+00, 1.42707226e+00, 1.37706426e+00, 1.44888593e+00, + 1.45229674e+00, 1.46827511e+00]) +} + +SPEAKERS_CONFIG['Bill_Gates'] = { + 'median': np.ones(98) * 112, + 'scale_factor': 1, + 'mean': np.array([ 0. 
, -1.65901123, -2.62040504, -1.67567383, 1.5779478 , + 2.27677556, 1.42385704, 1.3170184 , 1.19879 , 0.96268522, + 0.74065908, 0.55666252, 1.04719535, 0.76037056, 0.56424102, + 0.40574464, 1.05062748, 0.68746701, 0.47590087, 0.32751844, + 1.0213114 , 0.67258543, 0.48671625, 0.34948249, 0.98083327, + 0.70350887, 0.56702056, 0.45242877, -1.58726746, -1.4728882 , + -1.27845931, -1.12835936, -1.00697183, -1.39988316, -1.20836639, + -1.05706819, -0.94011508, -1.45365837, -1.22129736, -1.05546607, + -0.94138093, -1.47802005, -1.25897673, -1.10377321, -0.99176046, + -1.47791562, -1.31199306, -1.20487043, -1.10330991, 0. , + 0.63376724, 2.9768444 , 1.9201872 , 0.61075744, 3.05337817, + 2.37583868, 2.2788781 , 2.00000366, 1.82876588, 1.79312354, + 1.76777677, 1.91960979, 1.8979598 , 1.89223526, 1.90009094, + 2.13137775, 2.10057917, 2.06954037, 2.04540326, 2.30330089, + 2.26177726, 2.21134076, 2.16438097, 2.44113155, 2.39222672, + 2.34630512, 2.29878913, 1.73476003, 1.51328262, 1.33038028, + 1.19058549, 1.09880832, 1.16350011, 1.04367205, 1.04802128, + 1.07784193, 1.2713513 , 1.16845977, 1.19792404, 1.23544504, + 1.41444819, 1.33930394, 1.36110806, 1.38602307, 1.57745357, + 1.53407041, 1.53709354, 1.53643241]), + 'std': np.array([1.00000000e-09, 2.54368567e-01, 5.83332917e-01, 1.15142710e+00, + 1.97158141e-01, 3.77624570e-01, 6.40216173e-01, 7.58684416e-01, + 8.50109244e-01, 9.21852221e-01, 9.86584956e-01, 1.03646303e+00, + 1.03406142e+00, 1.10359513e+00, 1.13120303e+00, 1.15306422e+00, + 1.04380541e+00, 1.10986972e+00, 1.12975535e+00, 1.14217362e+00, + 1.03486066e+00, 1.09802824e+00, 1.10983464e+00, 1.11674439e+00, + 1.01764547e+00, 1.06938455e+00, 1.07777407e+00, 1.08700549e+00, + 1.24625350e+00, 1.29106444e+00, 1.40470767e+00, 1.53967733e+00, + 1.66036111e+00, 1.50520367e+00, 1.64354779e+00, 1.72179679e+00, + 1.78148170e+00, 1.53200939e+00, 1.66855278e+00, 1.74710506e+00, + 1.80164873e+00, 1.53887342e+00, 1.65943740e+00, 1.73015571e+00, + 1.78040136e+00, 1.54134692e+00, 1.63370080e+00, 1.68624933e+00, + 1.73739507e+00, 1.00000000e-09, 1.41867468e-01, 6.74737065e-01, + 9.09216435e-01, 1.30571354e-01, 3.34107582e-01, 5.16529942e-01, + 6.46399789e-01, 7.09281247e-01, 7.69013323e-01, 8.34192038e-01, + 8.79042370e-01, 8.23650557e-01, 9.05584322e-01, 9.40793735e-01, + 9.63994407e-01, 8.55480876e-01, 9.36165947e-01, 9.55800937e-01, + 9.62736167e-01, 8.65513890e-01, 9.47473627e-01, 9.59739778e-01, + 9.59384063e-01, 8.71633274e-01, 9.32182625e-01, 9.48551686e-01, + 9.55036059e-01, 1.14048164e+00, 1.15221438e+00, 1.18467852e+00, + 1.19077197e+00, 1.23125204e+00, 1.19134360e+00, 1.24101455e+00, + 1.27935240e+00, 1.32779115e+00, 1.22295546e+00, 1.27777633e+00, + 1.31858545e+00, 1.35349085e+00, 1.25384595e+00, 1.30098491e+00, + 1.33062427e+00, 1.34747032e+00, 1.28365129e+00, 1.32616956e+00, + 1.33520423e+00, 1.34267033e+00]) +} + +SPEAKERS_CONFIG['Christina_Wallace'] = { + 'median': np.ones(98) * 112, + 'scale_factor': 1, + 'mean': np.array([ 0. 
, -1.4270045 , -1.92638165, -1.54798809, 1.47699254, + 1.90600553, 1.51393449, 1.36865848, 1.27669665, 1.16583805, + 1.08766372, 1.03883452, 1.30709821, 1.13669758, 0.97883178, + 0.83445061, 1.29967894, 1.06185055, 0.8753138 , 0.71678297, + 1.25859363, 1.02610507, 0.85781108, 0.71904575, 1.20389917, + 1.02209034, 0.89906534, 0.7949335 , -1.52016294, -1.44237293, + -1.31756207, -1.21881972, -1.14342114, -1.44205991, -1.30853941, + -1.20292522, -1.11382424, -1.46689069, -1.30332438, -1.19207267, + -1.1068739 , -1.46969676, -1.31106321, -1.21296044, -1.1395027 , + -1.45676802, -1.32510608, -1.25315889, -1.2049942 , 0. , + 0.80477358, 3.1520563 , 2.65176425, 0.6842569 , 2.96016206, + 2.92202314, 2.76900744, 2.49262877, 2.30488874, 2.22522464, + 2.157103 , 2.40696086, 2.36739691, 2.35786848, 2.35455045, + 2.58498435, 2.54525383, 2.52320048, 2.50268181, 2.74161117, + 2.70101106, 2.66125496, 2.62429763, 2.87051536, 2.83655962, + 2.79596374, 2.75763666, 2.45200628, 2.15476683, 1.92561859, + 1.80779182, 1.71216641, 1.89485111, 1.80403182, 1.78664954, + 1.78645247, 2.05067525, 1.96392261, 1.94261299, 1.92343288, + 2.20805317, 2.14009071, 2.11155272, 2.08524038, 2.36703949, + 2.32318318, 2.29717179, 2.25810948]), + 'std': np.array([1.00000000e-09, 3.15892363e-01, 5.27419215e-01, 8.60761116e-01, + 3.33738621e-01, 5.11970534e-01, 8.57437195e-01, 1.09036769e+00, + 1.23041182e+00, 1.31049909e+00, 1.39528031e+00, 1.46771086e+00, + 1.46831417e+00, 1.53102854e+00, 1.52621133e+00, 1.51389910e+00, + 1.47988804e+00, 1.51637656e+00, 1.49585122e+00, 1.46897428e+00, + 1.44751216e+00, 1.47397165e+00, 1.44958831e+00, 1.43128788e+00, + 1.39493292e+00, 1.41321046e+00, 1.39802501e+00, 1.39100324e+00, + 9.51229709e-01, 1.01169750e+00, 1.09201916e+00, 1.16875331e+00, + 1.24810391e+00, 1.20803181e+00, 1.30662112e+00, 1.33877434e+00, + 1.36455059e+00, 1.21241944e+00, 1.30243857e+00, 1.33020706e+00, + 1.35661141e+00, 1.19779165e+00, 1.27055697e+00, 1.29153018e+00, + 1.30931299e+00, 1.17334091e+00, 1.23426890e+00, 1.24387600e+00, + 1.26221962e+00, 1.00000000e-09, 1.83972226e-01, 6.54627835e-01, + 8.42258433e-01, 1.60072830e-01, 5.65656929e-01, 7.28566891e-01, + 1.06977669e+00, 1.20146446e+00, 1.23194815e+00, 1.25501319e+00, + 1.25392834e+00, 1.28325650e+00, 1.31368782e+00, 1.33579219e+00, + 1.34915712e+00, 1.34450905e+00, 1.37450866e+00, 1.38547012e+00, + 1.38486337e+00, 1.37526730e+00, 1.40363948e+00, 1.40120156e+00, + 1.40164597e+00, 1.38798591e+00, 1.40940050e+00, 1.40540798e+00, + 1.40472439e+00, 1.12791412e+00, 1.21940263e+00, 1.26500979e+00, + 1.28566902e+00, 1.34443802e+00, 1.32931399e+00, 1.36867346e+00, + 1.41149110e+00, 1.45194651e+00, 1.36288876e+00, 1.42833085e+00, + 1.47681322e+00, 1.52678291e+00, 1.38814975e+00, 1.44718508e+00, + 1.48900934e+00, 1.51451143e+00, 1.39038701e+00, 1.43982392e+00, + 1.45530627e+00, 1.49761704e+00]) +} + +SPEAKERS_CONFIG['Enric_Sala'] = { + 'median': np.ones(98) * 112, + 'scale_factor': 1, + 'mean': np.array([ 0. 
, -1.35818404, -1.99522808, -1.51621112, 1.29967358, + 1.80377432, 1.41801134, 1.37077015, 1.2707847 , 1.1519362 , + 1.04618594, 0.94584526, 1.28510573, 1.14571809, 1.02793932, + 0.92025301, 1.31806471, 1.13281933, 1.00019173, 0.89648685, + 1.31747197, 1.13369087, 1.01495231, 0.93043523, 1.30041234, + 1.15315992, 1.06669468, 0.99273335, -1.47316361, -1.37781129, + -1.26004572, -1.15626102, -1.07273451, -1.39947422, -1.25269704, + -1.14463695, -1.05566838, -1.43779378, -1.24520699, -1.13505081, + -1.05098459, -1.44144737, -1.2570889 , -1.16052068, -1.09125391, + -1.43564641, -1.28842017, -1.21656355, -1.1573583 , 0. , + 0.61463935, 2.60566422, 3.13376125, 0.64517708, 2.65426302, + 3.2518751 , 3.30187523, 3.19037572, 3.15678588, 3.21427094, + 3.27446716, 3.31614183, 3.42560433, 3.46519365, 3.47871403, + 3.48251426, 3.5871208 , 3.59425829, 3.58099856, 3.60778525, + 3.68265183, 3.66713227, 3.6433546 , 3.70535138, 3.74814899, + 3.73393347, 3.70972399, 3.18611283, 3.05145905, 2.99792959, + 3.03806888, 3.09556377, 3.14632478, 3.24808558, 3.27730973, + 3.29154081, 3.30214787, 3.39943918, 3.40355177, 3.39618276, + 3.42181796, 3.50056104, 3.48921923, 3.4705326 , 3.51933868, + 3.57344501, 3.56552845, 3.54484765]), + 'std': np.array([1.00000000e-09, 2.47454420e-01, 5.03401057e-01, 1.20887959e+00, + 3.00871872e-01, 5.01953618e-01, 1.00320887e+00, 1.13565304e+00, + 1.25386670e+00, 1.41329585e+00, 1.55548797e+00, 1.68754894e+00, + 1.50087593e+00, 1.66182898e+00, 1.73326397e+00, 1.79422987e+00, + 1.47607994e+00, 1.62708113e+00, 1.68715100e+00, 1.73150286e+00, + 1.43156163e+00, 1.56552454e+00, 1.61634250e+00, 1.64912641e+00, + 1.38068864e+00, 1.48929794e+00, 1.52723414e+00, 1.55752142e+00, + 1.31074931e+00, 1.41083647e+00, 1.53916860e+00, 1.66531759e+00, + 1.77132544e+00, 1.63693787e+00, 1.77349398e+00, 1.81035470e+00, + 1.83711038e+00, 1.61840768e+00, 1.74140249e+00, 1.75696546e+00, + 1.76840213e+00, 1.57872715e+00, 1.68651331e+00, 1.69424962e+00, + 1.69612079e+00, 1.52972393e+00, 1.61435700e+00, 1.62203628e+00, + 1.62810229e+00, 1.00000000e-09, 1.13093005e-01, 4.87157598e-01, + 8.85808758e-01, 1.12909219e-01, 4.46723082e-01, 7.46145049e-01, + 9.09121445e-01, 9.22220204e-01, 9.65918942e-01, 1.01564210e+00, + 1.06492639e+00, 1.03327246e+00, 1.10921566e+00, 1.15769432e+00, + 1.20875200e+00, 1.06174779e+00, 1.14117939e+00, 1.18829758e+00, + 1.22048378e+00, 1.07145947e+00, 1.16131801e+00, 1.21777935e+00, + 1.24044224e+00, 1.07538777e+00, 1.16143573e+00, 1.21099780e+00, + 1.24304848e+00, 1.03808473e+00, 1.06470920e+00, 1.11524177e+00, + 1.18451450e+00, 1.25379623e+00, 1.21344021e+00, 1.28953693e+00, + 1.30931911e+00, 1.31329498e+00, 1.23072566e+00, 1.29495064e+00, + 1.30268243e+00, 1.29624343e+00, 1.23177526e+00, 1.29026257e+00, + 1.28765506e+00, 1.28100897e+00, 1.22373237e+00, 1.26962411e+00, + 1.27057886e+00, 1.26748468e+00]) +} + +SPEAKERS_CONFIG['GabeBarcia_Colombo'] = { + 'median': np.ones(98) * 112, + 'scale_factor': 1, + 'mean': np.array([ 0. 
, -1.84495687, -2.69811904, -1.68580485, 1.71445981, + 2.54691899, 1.94953072, 1.8616302 , 1.7447443 , 1.58822019, + 1.43564604, 1.30988838, 1.68882468, 1.36734826, 1.19536413, + 1.06951097, 1.66675671, 1.2773567 , 1.10034357, 0.97928662, + 1.64189969, 1.24680075, 1.08185443, 0.99469991, 1.6248793 , + 1.29935872, 1.14806931, 1.06077605, -1.61581423, -1.514775 , + -1.36913424, -1.20935257, -1.0847552 , -1.49382951, -1.27910079, + -1.14929049, -1.04834415, -1.5288084 , -1.28098417, -1.14810588, + -1.05630366, -1.56309994, -1.31391729, -1.19683654, -1.11319707, + -1.58372119, -1.38532164, -1.29225921, -1.2318247 , 0. , + 0.74351202, 3.14052412, 3.61160746, 0.61762302, 2.94543759, + 3.21793606, 3.17623992, 2.96471007, 2.79140821, 2.70632029, + 2.62646841, 3.01170032, 2.85379343, 2.79241089, 2.75216528, + 3.12300778, 2.93599049, 2.8702409 , 2.83110624, 3.24093329, + 3.03220637, 2.95394198, 2.93976633, 3.35662444, 3.16495979, + 3.06929703, 3.03455172, 3.41829076, 2.75711668, 2.41390421, + 2.37783849, 2.39171412, 2.31918492, 2.32707244, 2.35760768, + 2.39847052, 2.49001349, 2.48501887, 2.51587391, 2.55217904, + 2.59721794, 2.61692203, 2.6357909 , 2.67657661, 2.7425024 , + 2.72646607, 2.7344342 , 2.73467115]), + 'std': np.array([1.00000000e-09, 3.03040898e-01, 5.54888698e-01, 1.00838241e+00, + 3.22603886e-01, 5.75065145e-01, 9.59593245e-01, 1.11490148e+00, + 1.22495861e+00, 1.41551243e+00, 1.64280434e+00, 1.84093348e+00, + 1.54192654e+00, 1.84348246e+00, 1.95372614e+00, 2.01973030e+00, + 1.57775407e+00, 1.86733175e+00, 1.92488323e+00, 1.94238880e+00, + 1.54047720e+00, 1.83068057e+00, 1.87313393e+00, 1.84125575e+00, + 1.46840841e+00, 1.70662975e+00, 1.77005827e+00, 1.76827824e+00, + 1.13617677e+00, 1.30927152e+00, 1.48452586e+00, 1.61979114e+00, + 1.72672499e+00, 1.66721720e+00, 1.81271658e+00, 1.84822928e+00, + 1.85363651e+00, 1.67998462e+00, 1.79811887e+00, 1.79525071e+00, + 1.76980691e+00, 1.64527248e+00, 1.74581916e+00, 1.72946955e+00, + 1.70111804e+00, 1.58430135e+00, 1.67423017e+00, 1.66383068e+00, + 1.64902373e+00, 1.00000000e-09, 1.60586711e-01, 5.01050133e-01, + 1.22083058e+00, 2.01485606e-01, 5.51927431e-01, 1.18514090e+00, + 1.35051642e+00, 1.44829275e+00, 1.52843740e+00, 1.68899924e+00, + 1.83329746e+00, 1.58603913e+00, 1.88069329e+00, 2.02450631e+00, + 2.11225959e+00, 1.64348597e+00, 2.01159682e+00, 2.10641999e+00, + 2.13700315e+00, 1.63811824e+00, 2.03930706e+00, 2.12789487e+00, + 2.10143109e+00, 1.59571843e+00, 1.92730945e+00, 2.05113707e+00, + 2.06904590e+00, 1.49594690e+00, 1.97519757e+00, 2.34286029e+00, + 2.49726379e+00, 2.60969731e+00, 2.68042942e+00, 2.86134905e+00, + 2.89788702e+00, 2.89090070e+00, 2.73998856e+00, 2.92547569e+00, + 2.92390909e+00, 2.88795446e+00, 2.79916681e+00, 2.92640858e+00, + 2.90701012e+00, 2.83642308e+00, 2.75932873e+00, 2.89695251e+00, + 2.88276608e+00, 2.85808018e+00]) +} + +SPEAKERS_CONFIG['Keller_Rinaudo'] = { + 'median': np.ones(98) * 112, + 'scale_factor': 1, + 'mean': np.array([ 0. 
, -1.78738992, -2.52147346, -1.93147501, 1.7345912 , + 2.28126498, 1.80134684, 1.74950939, 1.70865035, 1.64598065, + 1.59876342, 1.56364775, 1.64049298, 1.44702273, 1.29737506, + 1.15978579, 1.60573273, 1.3513232 , 1.17343081, 1.0309139 , + 1.54647142, 1.29477194, 1.12975028, 0.99637595, 1.47658534, + 1.27125765, 1.14344831, 1.02926293, -1.89615414, -1.85618838, + -1.7557093 , -1.65046768, -1.5609413 , -1.90192044, -1.73017835, + -1.56669773, -1.42028344, -1.885858 , -1.64412137, -1.45635819, + -1.30234121, -1.82820541, -1.58951764, -1.41692479, -1.27332804, + -1.75709109, -1.55786215, -1.42217143, -1.30984509, 0. , + 0.74594772, 3.4710353 , 3.62595917, 0.86981054, 3.71350345, + 3.57781239, 3.53919815, 3.2447969 , 3.00514423, 2.88083382, + 2.76486544, 3.20482203, 3.17108081, 3.15224654, 3.13849198, + 3.42064124, 3.38753665, 3.35724291, 3.3207806 , 3.59082033, + 3.55998269, 3.52200485, 3.47627073, 3.72510632, 3.70366315, + 3.677839 , 3.64453334, 3.5995842 , 3.33920424, 3.14636835, + 3.06954596, 3.01363391, 3.35671406, 3.35289845, 3.32710375, + 3.2980236 , 3.58684019, 3.56762257, 3.51312514, 3.45327324, + 3.75518034, 3.72820413, 3.66560378, 3.60659574, 3.88239346, + 3.85662784, 3.81101137, 3.75610839]), + 'std': np.array([1.00000000e-09, 3.00803024e-01, 5.95395929e-01, 1.24007866e+00, + 3.55361426e-01, 6.57735124e-01, 1.09734760e+00, 1.23416447e+00, + 1.32174657e+00, 1.44978389e+00, 1.58884179e+00, 1.72281572e+00, + 1.61785405e+00, 1.80790148e+00, 1.90028709e+00, 1.98271841e+00, + 1.64021436e+00, 1.81500580e+00, 1.89275859e+00, 1.96006245e+00, + 1.63051994e+00, 1.78339469e+00, 1.83989325e+00, 1.89482212e+00, + 1.60499391e+00, 1.72208982e+00, 1.76085781e+00, 1.79964821e+00, + 1.33217873e+00, 1.42696386e+00, 1.56515476e+00, 1.72033356e+00, + 1.85993785e+00, 1.73046674e+00, 1.91366543e+00, 1.96581268e+00, + 1.99654040e+00, 1.73212774e+00, 1.89253094e+00, 1.92691167e+00, + 1.95335719e+00, 1.69667588e+00, 1.83129231e+00, 1.85534601e+00, + 1.87732654e+00, 1.64770601e+00, 1.74669774e+00, 1.76613183e+00, + 1.78500658e+00, 1.00000000e-09, 1.99940812e-01, 4.61767005e-01, + 8.86016683e-01, 1.78029993e-01, 4.46492749e-01, 8.34407226e-01, + 9.68574599e-01, 1.04408871e+00, 1.11846635e+00, 1.19657357e+00, + 1.27295590e+00, 1.23160439e+00, 1.33707309e+00, 1.38255663e+00, + 1.42277279e+00, 1.26791466e+00, 1.36124994e+00, 1.39390257e+00, + 1.42477071e+00, 1.28538613e+00, 1.37880952e+00, 1.39610214e+00, + 1.41331194e+00, 1.28955077e+00, 1.36759402e+00, 1.38333730e+00, + 1.40136053e+00, 1.09128797e+00, 1.17685705e+00, 1.23353951e+00, + 1.30591386e+00, 1.38113247e+00, 1.30241085e+00, 1.41565837e+00, + 1.44668198e+00, 1.46415263e+00, 1.32942308e+00, 1.43587619e+00, + 1.45757280e+00, 1.47607923e+00, 1.33333896e+00, 1.44767063e+00, + 1.45959983e+00, 1.44564938e+00, 1.32247867e+00, 1.43168819e+00, + 1.43918479e+00, 1.44440415e+00]) +} + +SPEAKERS_CONFIG['Kelly_Richmond_Pope'] = { + 'median': np.ones(98) * 112, + 'scale_factor': 1, + 'mean': np.array([ 0. 
, -1.48920134, -2.04893888, -0.95811192, 1.53300051, + 2.24192387, 1.50126426, 1.452441 , 1.40995624, 1.32044366, + 1.21032949, 1.12295017, 1.36272458, 1.20827239, 1.10143403, + 1.01569953, 1.35161153, 1.15768619, 1.03055541, 0.93638324, + 1.32506645, 1.13612809, 1.01059573, 0.91763815, 1.29039457, + 1.13856614, 1.04259999, 0.95911214, -0.87217257, -0.79400782, + -0.61366814, -0.41359908, -0.26693407, -0.69234072, -0.45127755, + -0.30424043, -0.18361645, -0.68727446, -0.40878744, -0.26196051, + -0.12778578, -0.67384948, -0.4121739 , -0.28130532, -0.16207837, + -0.65321919, -0.44440213, -0.34437091, -0.26847412, 0. , + 0.67350852, 3.3491127 , 2.50428321, 0.6538276 , 3.24886128, + 2.3682413 , 2.25934672, 2.02525658, 1.80509312, 1.67796826, + 1.57840341, 1.84773248, 1.74631718, 1.69360846, 1.66369471, + 1.99722079, 1.90388072, 1.85660309, 1.81572471, 2.13883096, + 2.0685992 , 2.0233066 , 1.97585357, 2.27204943, 2.22339606, + 2.19150311, 2.15926461, 2.37668845, 2.13731186, 1.94049134, + 1.86580005, 1.8473729 , 1.93791764, 1.90136516, 1.91613481, + 1.92907644, 2.11309842, 2.09415798, 2.10249546, 2.10249497, + 2.28015051, 2.27229193, 2.26712395, 2.25328978, 2.43768441, + 2.42824371, 2.4197904 , 2.40696451]), + 'std': np.array([1.00000000e-09, 2.53923399e-01, 3.90349116e-01, 8.86542199e-01, + 2.64533865e-01, 4.67456184e-01, 9.57308502e-01, 1.05762501e+00, + 1.08304933e+00, 1.17116325e+00, 1.29735782e+00, 1.40475584e+00, + 1.33563237e+00, 1.49510416e+00, 1.58435843e+00, 1.64858495e+00, + 1.38879380e+00, 1.53750448e+00, 1.61926354e+00, 1.68104516e+00, + 1.41067178e+00, 1.53666612e+00, 1.60155752e+00, 1.65738377e+00, + 1.41558444e+00, 1.51120789e+00, 1.55193307e+00, 1.59553769e+00, + 9.57186781e-01, 1.01433076e+00, 1.10980873e+00, 1.21412544e+00, + 1.29085191e+00, 1.23113801e+00, 1.33107644e+00, 1.35769798e+00, + 1.37313639e+00, 1.22772994e+00, 1.31015772e+00, 1.32479179e+00, + 1.33650539e+00, 1.19986054e+00, 1.26647044e+00, 1.27027957e+00, + 1.28295690e+00, 1.16675008e+00, 1.20854985e+00, 1.20908509e+00, + 1.21409621e+00, 1.00000000e-09, 1.43384782e-01, 4.33339156e-01, + 5.33316834e-01, 1.48132557e-01, 4.46659827e-01, 6.40434285e-01, + 7.78412924e-01, 7.81349440e-01, 8.14052593e-01, 8.63403730e-01, + 9.31923445e-01, 9.20377208e-01, 9.99067650e-01, 1.03005370e+00, + 1.07615607e+00, 9.47913515e-01, 1.02064282e+00, 1.05034200e+00, + 1.08621213e+00, 9.60807105e-01, 1.02294155e+00, 1.04839428e+00, + 1.07767468e+00, 9.69767078e-01, 1.01846695e+00, 1.03706538e+00, + 1.05214278e+00, 7.34361389e-01, 7.61960806e-01, 8.32154190e-01, + 8.76982362e-01, 9.27080655e-01, 8.46693571e-01, 9.00331500e-01, + 9.29004850e-01, 9.57198495e-01, 8.52791373e-01, 9.12588534e-01, + 9.28051883e-01, 9.55032351e-01, 8.59636232e-01, 9.15094378e-01, + 9.23725282e-01, 9.33535195e-01, 8.60888967e-01, 9.10895457e-01, + 9.13422374e-01, 9.16148590e-01]) +} + +SPEAKERS_CONFIG['Laurel_Braitman'] = { + 'median': np.ones(98) * 112, + 'scale_factor': 1, + 'mean': np.array([ 0. 
, -1.26868863, -1.87098366, -1.01995915, 1.230246 , + 1.60567757, 1.45242051, 1.23634666, 1.16628013, 1.1057192 , + 1.07131581, 1.05656517, 1.20836373, 1.09665823, 0.9902999 , + 0.90071179, 1.20313211, 1.04215165, 0.92025741, 0.8300174 , + 1.16211425, 1.00103395, 0.89293162, 0.8055235 , 1.10862517, + 0.97934472, 0.89707228, 0.82220811, -0.94882764, -0.86471772, + -0.72952711, -0.60066635, -0.48841124, -0.76140571, -0.5753176 , + -0.47160224, -0.39194415, -0.7628756 , -0.54260269, -0.44328739, + -0.37462327, -0.75913291, -0.55397894, -0.47474268, -0.41818146, + -0.75292986, -0.59082045, -0.53717163, -0.49239772, 0. , + 0.58733535, 2.83305761, 2.79689939, 0.6442057 , 2.94243467, + 2.98168055, 2.67673856, 2.43593169, 2.25937488, 2.17248602, + 2.09747913, 2.38516201, 2.3484576 , 2.31316694, 2.29628302, + 2.52705886, 2.4891615 , 2.4510063 , 2.4289973 , 2.63961436, + 2.6013599 , 2.56102321, 2.52856099, 2.7281941 , 2.70113829, + 2.6678961 , 2.63724203, 2.71331603, 2.48670563, 2.32928079, + 2.27551938, 2.22813743, 2.41694582, 2.4097357 , 2.40269522, + 2.40198668, 2.59073198, 2.58847824, 2.56716072, 2.55636086, + 2.72991609, 2.72422104, 2.68881188, 2.66177392, 2.85225476, + 2.83451976, 2.80707064, 2.77812353]), + 'std': np.array([1.00000000e-09, 3.36207485e-01, 5.65952623e-01, 1.09601006e+00, + 3.60507252e-01, 5.53363660e-01, 8.71330284e-01, 1.33924323e+00, + 1.44572371e+00, 1.54197319e+00, 1.63208459e+00, 1.71566427e+00, + 1.64561863e+00, 1.75387708e+00, 1.80758940e+00, 1.84653336e+00, + 1.67346574e+00, 1.77167339e+00, 1.80851089e+00, 1.82561550e+00, + 1.67726643e+00, 1.76190819e+00, 1.78139931e+00, 1.79185364e+00, + 1.66774100e+00, 1.72982745e+00, 1.74159894e+00, 1.74778621e+00, + 1.21998417e+00, 1.30897581e+00, 1.43805233e+00, 1.58257482e+00, + 1.71347226e+00, 1.59926923e+00, 1.74417587e+00, 1.76421543e+00, + 1.76709188e+00, 1.60155239e+00, 1.71146535e+00, 1.70132364e+00, + 1.68349095e+00, 1.57224090e+00, 1.65104565e+00, 1.63127111e+00, + 1.61659914e+00, 1.52619759e+00, 1.58085971e+00, 1.56229479e+00, + 1.55421028e+00, 1.00000000e-09, 1.46427209e-01, 3.93662281e-01, + 6.07065617e-01, 1.40398330e-01, 4.15765168e-01, 8.21108252e-01, + 1.49612292e+00, 1.57773251e+00, 1.62226852e+00, 1.66815451e+00, + 1.71084934e+00, 1.64497118e+00, 1.72792269e+00, 1.79774214e+00, + 1.84316768e+00, 1.70014648e+00, 1.78365470e+00, 1.85094675e+00, + 1.87586549e+00, 1.74789087e+00, 1.84158057e+00, 1.87892728e+00, + 1.89528924e+00, 1.78775845e+00, 1.85496164e+00, 1.88014832e+00, + 1.89393290e+00, 9.61556295e-01, 1.02146958e+00, 1.03281056e+00, + 1.07426619e+00, 1.12808412e+00, 1.10425374e+00, 1.19528669e+00, + 1.24159298e+00, 1.25975681e+00, 1.14086979e+00, 1.23393208e+00, + 1.26579025e+00, 1.26930605e+00, 1.18156855e+00, 1.26150247e+00, + 1.28309710e+00, 1.27165891e+00, 1.20423411e+00, 1.28015288e+00, + 1.29460707e+00, 1.29846411e+00]) +} + +SPEAKERS_CONFIG['FeiFei_Li'] = { + 'median': np.ones(98) * 112, + 'scale_factor': 1, + 'mean': np.array([ 0. 
, -1.32922645, -1.85814849, -1.08771012, 1.31875965, + 1.85768118, 1.29200868, 1.22904343, 1.20470005, 1.13733549, + 1.04021281, 0.95618545, 1.08070787, 0.9127705 , 0.81111326, + 0.72210966, 1.03326398, 0.84261813, 0.72898407, 0.64112826, + 0.9885785 , 0.80918202, 0.70531016, 0.62213903, 0.9532218 , + 0.80309979, 0.72434084, 0.65517283, -1.0211285 , -1.00481072, + -0.91805299, -0.7853383 , -0.65963172, -0.93565827, -0.74934263, + -0.61536215, -0.51357046, -0.88191073, -0.65044258, -0.51759307, + -0.41819717, -0.81539671, -0.60156789, -0.48974676, -0.4037503 , + -0.75825194, -0.58648166, -0.49967798, -0.43140236, 0. , + 0.61523094, 2.99101973, 2.82795483, 0.60267024, 2.94560678, + 2.74179787, 2.70832957, 2.48603879, 2.29970555, 2.20981195, + 2.1420304 , 2.45159169, 2.40626175, 2.36825043, 2.33335382, + 2.61385036, 2.56056323, 2.50423411, 2.44792169, 2.73575519, + 2.68339696, 2.62064784, 2.55983791, 2.83030044, 2.78722326, + 2.73925705, 2.69403961, 2.79279904, 2.57665918, 2.3895283 , + 2.31054748, 2.26792986, 2.58411972, 2.52509296, 2.45802689, + 2.40038508, 2.75640485, 2.67235726, 2.57642929, 2.49189751, + 2.87460685, 2.79051299, 2.69637763, 2.61361412, 2.95931518, + 2.89017918, 2.82079246, 2.75081641]), + 'std': np.array([1.00000000e-09, 4.01270237e-01, 6.61906050e-01, 8.67357396e-01, + 3.03886422e-01, 4.93930744e-01, 1.00315836e+00, 1.08994935e+00, + 1.15358595e+00, 1.24476238e+00, 1.34774675e+00, 1.44224902e+00, + 1.39750397e+00, 1.52809400e+00, 1.57175281e+00, 1.60226655e+00, + 1.40374661e+00, 1.51618595e+00, 1.54362715e+00, 1.55882622e+00, + 1.37685150e+00, 1.46544658e+00, 1.47670399e+00, 1.48185369e+00, + 1.33674645e+00, 1.40200064e+00, 1.40459527e+00, 1.40535630e+00, + 9.48128665e-01, 1.00615584e+00, 1.08558650e+00, 1.17923007e+00, + 1.26695933e+00, 1.24538786e+00, 1.37805274e+00, 1.40899941e+00, + 1.41940145e+00, 1.24818458e+00, 1.35478648e+00, 1.37026815e+00, + 1.37491440e+00, 1.22248680e+00, 1.30768321e+00, 1.31584247e+00, + 1.32338228e+00, 1.18181831e+00, 1.24531856e+00, 1.25399418e+00, + 1.26433002e+00, 1.00000000e-09, 1.10433605e-01, 5.04997533e-01, + 7.33180740e-01, 1.37702062e-01, 4.71276884e-01, 6.89300725e-01, + 7.92184413e-01, 8.04213970e-01, 8.34114798e-01, 8.79516872e-01, + 9.32751580e-01, 9.42807610e-01, 1.01749133e+00, 1.03730247e+00, + 1.05578894e+00, 9.73649737e-01, 1.03839012e+00, 1.04586362e+00, + 1.05041444e+00, 9.81489248e-01, 1.03739286e+00, 1.03896862e+00, + 1.04136529e+00, 9.75381561e-01, 1.01787587e+00, 1.01801296e+00, + 1.01824082e+00, 8.63257643e-01, 8.69766294e-01, 8.91727664e-01, + 9.36124853e-01, 9.95056827e-01, 9.86869017e-01, 1.05790103e+00, + 1.06921752e+00, 1.07701399e+00, 1.01192141e+00, 1.07526567e+00, + 1.07471000e+00, 1.07149221e+00, 1.01778730e+00, 1.07193667e+00, + 1.06738504e+00, 1.06095046e+00, 1.01741729e+00, 1.05838525e+00, + 1.05548662e+00, 1.05223751e+00]) +} + +SPEAKERS_CONFIG['Stacy_Smith_no_good'] = { + 'median': np.ones(98) * 112, + 'scale_factor': 1, + 'mean': np.array([ 0. 
, -1.28912846, -1.82666229, -1.35234695, 1.23076528, + 1.77778629, 1.46766701, 1.2864184 , 1.16931298, 1.03653975, + 0.93779213, 0.85664462, 1.1496161 , 0.99602766, 0.87014513, + 0.76366205, 1.17915496, 0.99367346, 0.84317816, 0.72764322, + 1.17152378, 0.99716021, 0.85468635, 0.74959164, 1.15385648, + 0.9993751 , 0.90511679, 0.81964507, -1.47832537, -1.38889348, + -1.26019428, -1.14480445, -1.05120991, -1.36250422, -1.20209989, + -1.09052991, -0.99777226, -1.39280199, -1.19262039, -1.08192402, + -0.99773317, -1.39784506, -1.20799554, -1.11034083, -1.03707921, + -1.39170543, -1.24008711, -1.16296191, -1.10167654, 0. , + 0.61929535, 2.72414904, 2.47944463, 0.5863552 , 2.7016951 , + 2.75339457, 2.41096758, 2.20635027, 2.05928953, 2.04628715, + 2.01123381, 2.16740804, 2.20001709, 2.21225638, 2.22729995, + 2.31252076, 2.3862505 , 2.37268093, 2.38261426, 2.43845144, + 2.49573994, 2.47743702, 2.45915275, 2.52378712, 2.56074717, + 2.56927303, 2.56214156, 2.11320469, 1.9121716 , 1.7689074 , + 1.74395265, 1.73559125, 1.83924951, 1.85552279, 1.87943692, + 1.89891177, 1.99870556, 2.02764915, 2.03559116, 2.03329632, + 2.13951958, 2.16618261, 2.15927578, 2.14845359, 2.2632546 , + 2.27707884, 2.26971367, 2.25506023]), + 'std': np.array([1.00000000e-09, 8.92823700e-01, 1.10002413e+00, 1.25177429e+00, + 8.67292927e-01, 1.19831099e+00, 1.24394302e+00, 1.54490497e+00, + 1.58548243e+00, 1.63609205e+00, 1.68833646e+00, 1.73873945e+00, + 1.74329229e+00, 1.80779239e+00, 1.84333634e+00, 1.86225693e+00, + 1.76127359e+00, 1.78418224e+00, 1.81400435e+00, 1.83209982e+00, + 1.76080909e+00, 1.76318761e+00, 1.79387041e+00, 1.80816269e+00, + 1.75039940e+00, 1.77014976e+00, 1.76389817e+00, 1.76620365e+00, + 1.25364787e+00, 1.31967154e+00, 1.42704749e+00, 1.54403416e+00, + 1.64492832e+00, 1.55475606e+00, 1.70313704e+00, 1.75592604e+00, + 1.79449466e+00, 1.56272222e+00, 1.69054730e+00, 1.72806532e+00, + 1.75556244e+00, 1.54003812e+00, 1.64737428e+00, 1.67175392e+00, + 1.69299364e+00, 1.50712116e+00, 1.58714538e+00, 1.60323988e+00, + 1.61936470e+00, 1.00000000e-09, 2.10650255e-01, 5.80080474e-01, + 8.71761322e-01, 2.08856441e-01, 5.51268298e-01, 9.19000830e-01, + 1.74667232e+00, 1.79278955e+00, 1.89100094e+00, 1.89618453e+00, + 1.96661885e+00, 1.87435677e+00, 1.92546311e+00, 1.99802652e+00, + 2.04679046e+00, 1.93376470e+00, 1.89509466e+00, 2.01419452e+00, + 2.04884810e+00, 1.97020168e+00, 1.96831543e+00, 2.06804319e+00, + 2.14399675e+00, 2.07640471e+00, 2.09425503e+00, 2.09651364e+00, + 2.12507376e+00, 1.93803292e+00, 1.92517322e+00, 1.94642894e+00, + 1.96688456e+00, 2.00492763e+00, 1.97795985e+00, 2.03396255e+00, + 2.05612630e+00, 2.07518205e+00, 2.03102159e+00, 2.07296839e+00, + 2.09946614e+00, 2.12195313e+00, 2.06436884e+00, 2.10324549e+00, + 2.12321785e+00, 2.13830186e+00, 2.09191381e+00, 2.12934365e+00, + 2.13460452e+00, 2.14289938e+00]) + +} + +SPEAKERS_CONFIG['Sara_DeWitt'] = { + 'median': np.ones(98) * 112, + 'scale_factor': 1, + 'mean': np.array([ 0. 
, -1.37184282, -1.8510818 , -1.54915377, 1.38336419, + 1.82950771, 1.55901071, 1.48288919, 1.45420276, 1.4138943 , + 1.37400342, 1.34755763, 1.49987204, 1.38169897, 1.27917511, + 1.19474766, 1.47641123, 1.31217632, 1.189728 , 1.09872595, + 1.42046492, 1.26240205, 1.15318272, 1.07199526, 1.35531189, + 1.23125444, 1.14840915, 1.08043586, -1.52510937, -1.52114492, + -1.47066622, -1.4152586 , -1.37078736, -1.59187905, -1.48237387, + -1.35908197, -1.25351705, -1.56158309, -1.39027923, -1.24751337, + -1.13664092, -1.49661378, -1.32506876, -1.19790324, -1.10028302, + -1.42000002, -1.27619745, -1.18184548, -1.10681848, 0. , + 0.58821466, 2.83916611, 2.66064128, 0.54920348, 2.84379604, + 2.55048422, 2.46041205, 2.24073286, 2.09088933, 2.03565867, + 1.99563058, 2.24380314, 2.24432869, 2.23993982, 2.2324649 , + 2.40863862, 2.40556716, 2.38845106, 2.36521753, 2.53180453, + 2.52828881, 2.50349085, 2.47501088, 2.6259943 , 2.62576882, + 2.6074102 , 2.5821455 , 2.61906493, 2.40004186, 2.22496362, + 2.148853 , 2.09807848, 2.38809314, 2.3594683 , 2.32370973, + 2.28265472, 2.5680801 , 2.51820904, 2.45815535, 2.38993265, + 2.69831853, 2.64741075, 2.57859469, 2.51283887, 2.79593769, + 2.75382193, 2.70167479, 2.64477413]), + 'std': np.array([1.00000000e-09, 3.19702173e-01, 5.70710490e-01, 9.40193212e-01, + 2.30106787e-01, 4.98340463e-01, 9.95014065e-01, 1.17869299e+00, + 1.29255058e+00, 1.44317287e+00, 1.58822968e+00, 1.72162930e+00, + 1.56582042e+00, 1.73487927e+00, 1.81749454e+00, 1.88562954e+00, + 1.56889072e+00, 1.72860382e+00, 1.79983476e+00, 1.85779923e+00, + 1.54069426e+00, 1.67741963e+00, 1.73679736e+00, 1.78952475e+00, + 1.50475116e+00, 1.60597745e+00, 1.65099349e+00, 1.69306938e+00, + 1.01589140e+00, 1.10478856e+00, 1.23974056e+00, 1.36961289e+00, + 1.48531092e+00, 1.36046920e+00, 1.52300219e+00, 1.59427667e+00, + 1.64095845e+00, 1.35903130e+00, 1.50975437e+00, 1.56420591e+00, + 1.60196547e+00, 1.33059447e+00, 1.45974952e+00, 1.49938532e+00, + 1.53141177e+00, 1.29262250e+00, 1.38835560e+00, 1.41807375e+00, + 1.44714845e+00, 1.00000000e-09, 1.28313997e-01, 3.23792872e-01, + 6.13309103e-01, 1.07324369e-01, 3.58237395e-01, 5.96216403e-01, + 8.69704275e-01, 9.21380449e-01, 9.68927982e-01, 1.03352768e+00, + 1.09851581e+00, 1.05808113e+00, 1.14243368e+00, 1.17406046e+00, + 1.19991012e+00, 1.08538578e+00, 1.15249799e+00, 1.17259143e+00, + 1.19080480e+00, 1.08763195e+00, 1.14360648e+00, 1.15451993e+00, + 1.17010702e+00, 1.08639138e+00, 1.11511799e+00, 1.12028988e+00, + 1.13727037e+00, 7.41432525e-01, 7.87060922e-01, 8.47197151e-01, + 9.19670949e-01, 9.84115403e-01, 9.31612119e-01, 1.01833688e+00, + 1.04747188e+00, 1.05795749e+00, 9.56169595e-01, 1.03566663e+00, + 1.03753945e+00, 1.03120827e+00, 9.57368323e-01, 1.01635040e+00, + 1.00632120e+00, 9.94429438e-01, 9.33024789e-01, 9.79701117e-01, + 9.75321867e-01, 9.71103681e-01]) +} + +SPEAKERS_CONFIG['Vicki_Arroyo'] = { + 'median': np.ones(98) * 112, + 'scale_factor': 1, + 'mean': np.array([ 0. 
, -1.43015381, -1.93072716, -1.929016 , 1.43051456, + 1.79539336, 1.33079277, 1.25639017, 1.28800148, 1.29916159, + 1.2688963 , 1.23790049, 1.26336022, 1.14759621, 1.04971903, + 0.96159823, 1.16789119, 1.02311467, 0.91799632, 0.83852686, + 1.07868753, 0.94445513, 0.85248386, 0.7840061 , 0.99818056, + 0.89274694, 0.82251076, 0.76641284, -1.93565045, -1.93953328, + -1.93574896, -1.92793866, -1.92159172, -2.12664706, -2.08316812, + -1.99678621, -1.9072207 , -2.11964038, -2.02626014, -1.90842131, + -1.80383381, -2.06213015, -1.97182217, -1.86424354, -1.76999184, + -1.99063965, -1.92653751, -1.85345277, -1.79058625, 0. , + 0.5366566 , 2.62596687, 3.34771149, 0.59633497, 2.85982185, + 3.05133782, 3.04521947, 2.87884006, 2.75948967, 2.71992979, + 2.69011686, 2.97652132, 2.9439777 , 2.88281983, 2.81884489, + 3.09882659, 3.04049107, 2.95073582, 2.86394547, 3.16664405, + 3.10559218, 3.01155166, 2.92665827, 3.20955635, 3.15880562, + 3.08549625, 3.01226139, 3.37345152, 3.2525375 , 3.21093273, + 3.28166123, 3.37082102, 3.35384735, 3.39932079, 3.42763475, + 3.42134346, 3.47143328, 3.47240795, 3.47935384, 3.44573709, + 3.51852575, 3.49118564, 3.48159362, 3.46044281, 3.5574022 , + 3.50824217, 3.48756869, 3.44470165]), + 'std': np.array([1.00000000e-09, 1.98321179e-01, 3.65893394e-01, 8.13108210e-01, + 1.92243730e-01, 4.72762268e-01, 1.07649132e+00, 1.17324452e+00, + 1.25214082e+00, 1.37015180e+00, 1.48734952e+00, 1.59654778e+00, + 1.51558568e+00, 1.65919638e+00, 1.72542276e+00, 1.77329541e+00, + 1.53057672e+00, 1.66220571e+00, 1.71258737e+00, 1.75177504e+00, + 1.51229552e+00, 1.62181707e+00, 1.66071336e+00, 1.69485294e+00, + 1.47954694e+00, 1.56446652e+00, 1.59586682e+00, 1.62164764e+00, + 8.92229897e-01, 9.95605539e-01, 1.12670865e+00, 1.24781256e+00, + 1.37759646e+00, 1.18206296e+00, 1.31750537e+00, 1.38031731e+00, + 1.42592459e+00, 1.16083349e+00, 1.28224211e+00, 1.32149570e+00, + 1.35393411e+00, 1.13551088e+00, 1.23588437e+00, 1.26101072e+00, + 1.28127341e+00, 1.10344225e+00, 1.18708923e+00, 1.20536018e+00, + 1.22461608e+00, 1.00000000e-09, 1.22265068e-01, 3.05127656e-01, + 7.86917301e-01, 1.23384493e-01, 3.01131291e-01, 6.14514003e-01, + 7.03279054e-01, 7.53473953e-01, 8.14138865e-01, 8.82095867e-01, + 9.55904653e-01, 9.04382528e-01, 9.77918198e-01, 9.95956130e-01, + 1.00630861e+00, 9.03510933e-01, 9.64448856e-01, 9.71159208e-01, + 9.74589465e-01, 8.84347353e-01, 9.35398779e-01, 9.42869443e-01, + 9.44980258e-01, 8.60567952e-01, 9.03796075e-01, 9.13007394e-01, + 9.16145058e-01, 9.55193321e-01, 1.10075037e+00, 1.23259112e+00, + 1.34038825e+00, 1.43884475e+00, 1.34053504e+00, 1.54123044e+00, + 1.59214216e+00, 1.64685761e+00, 1.35076605e+00, 1.55973958e+00, + 1.57388014e+00, 1.59611442e+00, 1.36340179e+00, 1.55857961e+00, + 1.55661706e+00, 1.53356894e+00, 1.32204978e+00, 1.51173798e+00, + 1.51811364e+00, 1.52194107e+00]) +} + +SPEAKERS_CONFIG['daniel_susskind'] = { + 'median': np.ones(98) * 112, + 'scale_factor': 1, + 'mean': np.array([ 0. 
, -1.59053417, -2.07767324, -1.09338101, 1.53651005, + 2.07084279, 1.2657888 , 0.8681602 , 0.66315872, 0.52595727, + 0.39664132, 0.3043368 , 0.51631851, 0.32829812, 0.18732256, + 0.07684901, 0.48270404, 0.2639835 , 0.11948665, 0.02808017, + 0.44566855, 0.23281344, 0.11988744, 0.03746794, 0.42008354, + 0.25205698, 0.16868312, 0.08951147, -1.08305768, -1.06480092, + -0.95743421, -0.85046964, -0.77385595, -0.91953657, -0.75436002, + -0.64821094, -0.56641812, -0.88654683, -0.71014125, -0.60564969, + -0.51675058, -0.87868211, -0.7100917 , -0.62287558, -0.53371606, + -0.87128532, -0.73365135, -0.66487187, -0.6010172 , 0. , + 0.80913979, 3.10558228, 2.86609018, 0.82263361, 3.09710402, + 2.97046181, 2.50935519, 2.09754339, 1.89945285, 1.7717207 , + 1.66913613, 1.96346754, 1.84878667, 1.78802765, 1.74045962, + 2.08457766, 1.98113303, 1.90852925, 1.85822288, 2.19858117, + 2.09435954, 2.04130035, 1.98663781, 2.30054038, 2.22927723, + 2.19146339, 2.14266844, 2.21134899, 1.92635412, 1.71005566, + 1.58447383, 1.46891345, 1.77906342, 1.6506884 , 1.59957187, + 1.56647093, 1.9352285 , 1.78048865, 1.72444274, 1.69291546, + 2.03953173, 1.90688235, 1.84841421, 1.81998571, 2.13551298, + 2.03088161, 1.98572487, 1.94822127]), + 'std': np.array([1.00000000e-09, 1.59984526e-01, 3.71958943e-01, 8.04411612e-01, + 1.74722568e-01, 3.81710706e-01, 7.96934376e-01, 1.23157336e+00, + 1.42127868e+00, 1.46218747e+00, 1.51725434e+00, 1.56354361e+00, + 1.50183811e+00, 1.53116700e+00, 1.57249393e+00, 1.60876483e+00, + 1.51951950e+00, 1.53756627e+00, 1.57464871e+00, 1.59548878e+00, + 1.53529253e+00, 1.55579021e+00, 1.56807497e+00, 1.58601754e+00, + 1.53521940e+00, 1.54050200e+00, 1.54392706e+00, 1.57615098e+00, + 1.01259207e+00, 1.04026598e+00, 1.10163391e+00, 1.18173389e+00, + 1.26469362e+00, 1.16053978e+00, 1.25700256e+00, 1.30358778e+00, + 1.33594040e+00, 1.17731439e+00, 1.27270321e+00, 1.30926889e+00, + 1.33660293e+00, 1.17969519e+00, 1.26832284e+00, 1.29551559e+00, + 1.31791898e+00, 1.17907833e+00, 1.24974262e+00, 1.27150920e+00, + 1.29216308e+00, 1.00000000e-09, 1.39032854e-01, 4.13297725e-01, + 7.88365923e-01, 1.38186475e-01, 3.90189221e-01, 8.28030489e-01, + 1.49467153e+00, 1.71065521e+00, 1.71583076e+00, 1.75900338e+00, + 1.76294228e+00, 1.72395545e+00, 1.77391448e+00, 1.84705865e+00, + 1.90261393e+00, 1.80421566e+00, 1.84299394e+00, 1.91089415e+00, + 1.94007365e+00, 1.87522200e+00, 1.92484334e+00, 1.94498226e+00, + 1.96099144e+00, 1.92051272e+00, 1.94099774e+00, 1.94739367e+00, + 1.97663235e+00, 1.82340904e+00, 1.89657934e+00, 1.90242621e+00, + 1.92978618e+00, 1.98043178e+00, 1.90902226e+00, 1.99603060e+00, + 2.03725386e+00, 2.05815040e+00, 1.95205565e+00, 2.07340564e+00, + 2.09608670e+00, 2.09626055e+00, 2.02195332e+00, 2.13121618e+00, + 2.14183609e+00, 2.11333070e+00, 2.07392000e+00, 2.15925925e+00, + 2.16443834e+00, 2.15144128e+00]) +} + +SPEAKERS_CONFIG['Dan_Ariely'] = { + 'median': np.ones(98) * 112, + 'scale_factor': 1, + 'mean': np.array([ 0. 
, -1.52061031, -2.17871201, -1.03637348, 1.56602338, + 2.31081532, 1.77478227, 1.61504379, 1.53390586, 1.42455973, + 1.32371477, 1.24964299, 1.53398251, 1.37291062, 1.26958856, + 1.18524512, 1.53706434, 1.33582853, 1.22476471, 1.1454513 , + 1.5101916 , 1.31308426, 1.21578703, 1.14428053, 1.46831911, + 1.31617502, 1.23916827, 1.17693301, -0.96139877, -0.90631806, + -0.75598977, -0.59161681, -0.45065024, -0.79540805, -0.54159407, + -0.41611286, -0.33142785, -0.7614939 , -0.47573752, -0.36941758, + -0.30542756, -0.72732643, -0.47009014, -0.38776467, -0.34032827, + -0.69908842, -0.50057372, -0.43429543, -0.39393205, 0. , + 0.66674422, 3.04998383, 2.60987325, 0.57170469, 2.92785166, + 2.44628093, 2.23481382, 1.98033512, 1.78840014, 1.68658471, + 1.62219602, 1.80432371, 1.72569816, 1.70779376, 1.69374821, + 1.94255916, 1.87217421, 1.85019987, 1.8276927 , 2.07621784, + 2.01156338, 1.98462304, 1.95593189, 2.19452345, 2.14429292, + 2.11716802, 2.08607231, 2.46856471, 2.19464317, 1.95160432, + 1.83740104, 1.77678689, 1.98001143, 1.93008268, 1.93066418, + 1.93192938, 2.15116316, 2.10266662, 2.10101372, 2.10144014, + 2.28950988, 2.24983751, 2.23831407, 2.23473248, 2.40976666, + 2.37807174, 2.36727801, 2.35060381]), + 'std': np.array([1.00000000e-09, 4.15709961e-01, 8.63784606e-01, 1.39586573e+00, + 4.46869217e-01, 7.74005933e-01, 1.39981938e+00, 1.63286196e+00, + 1.71068199e+00, 1.80196237e+00, 1.90788898e+00, 1.99967461e+00, + 1.94158869e+00, 2.09099786e+00, 2.14943006e+00, 2.19501581e+00, + 1.96984213e+00, 2.10393692e+00, 2.14374080e+00, 2.17494699e+00, + 1.97395035e+00, 2.08474048e+00, 2.10532438e+00, 2.12577471e+00, + 1.96539115e+00, 2.04273440e+00, 2.05453112e+00, 2.06815662e+00, + 1.46735606e+00, 1.49355078e+00, 1.54880998e+00, 1.63410442e+00, + 1.71308092e+00, 1.70952946e+00, 1.80315575e+00, 1.79935387e+00, + 1.79220798e+00, 1.74662800e+00, 1.80248497e+00, 1.77700708e+00, + 1.75310241e+00, 1.75264468e+00, 1.78928310e+00, 1.75556924e+00, + 1.72524800e+00, 1.74399645e+00, 1.76887337e+00, 1.74224653e+00, + 1.72189501e+00, 1.00000000e-09, 1.93716444e-01, 8.26189920e-01, + 1.19764453e+00, 2.01174651e-01, 8.77663849e-01, 1.23915253e+00, + 1.54105959e+00, 1.56067125e+00, 1.57109225e+00, 1.61951346e+00, + 1.66636871e+00, 1.64021549e+00, 1.72638677e+00, 1.77999450e+00, + 1.82788868e+00, 1.70918452e+00, 1.78934765e+00, 1.82899510e+00, + 1.85758404e+00, 1.76601563e+00, 1.84187674e+00, 1.86065846e+00, + 1.87840475e+00, 1.81812130e+00, 1.86787059e+00, 1.87686413e+00, + 1.89151743e+00, 1.41058649e+00, 1.45753614e+00, 1.47947725e+00, + 1.54831582e+00, 1.61956323e+00, 1.58221102e+00, 1.67662787e+00, + 1.70486072e+00, 1.72831709e+00, 1.65098387e+00, 1.73569401e+00, + 1.74720782e+00, 1.75079631e+00, 1.73507651e+00, 1.79883512e+00, + 1.79031076e+00, 1.77445969e+00, 1.79758386e+00, 1.84456882e+00, + 1.82801945e+00, 1.81920694e+00]) + +} + +SPEAKERS_CONFIG['Dena_Simmons'] = { + 'median': np.ones(98) * 112, + 'scale_factor': 1, + 'mean': np.array([ 0. 
, -1.32671111, -1.70898239, -1.38148399, 1.2674879 , + 1.70429258, 1.83110168, 1.15629808, 0.67745725, 0.39139582, + 0.24751003, 0.17217772, 0.57686829, 0.30324958, 0.18916796, + 0.10490191, 0.51274994, 0.24934172, 0.14662317, 0.0681714 , + 0.42096946, 0.21571229, 0.13971116, 0.06791959, 0.36387298, + 0.22501558, 0.15998 , 0.09242384, -1.38843969, -1.33251132, + -1.2320224 , -1.12926197, -1.03795619, -1.31640134, -1.17839898, + -1.07546404, -0.98838937, -1.32385558, -1.15195077, -1.04205277, + -0.95839166, -1.320917 , -1.15347666, -1.05234124, -0.9717273 , + -1.30338945, -1.16452591, -1.08407723, -1.0169221 , 0. , + 0.63789484, 2.82622566, 2.40705967, 0.47388226, 2.65721488, + 2.95790198, 2.0125996 , 1.3014549 , 0.90448183, 0.68825752, + 0.57698081, 1.0032702 , 0.69087711, 0.61187128, 0.56986139, + 0.9954768 , 0.75530534, 0.70279596, 0.65725578, 0.97945787, + 0.81827698, 0.79789433, 0.73969878, 1.00439302, 0.91154909, + 0.87638059, 0.81035885, 2.23345794, 1.9957466 , 1.77898656, + 1.67939582, 1.5828782 , 1.77336911, 1.69359889, 1.68066421, + 1.66981155, 1.8993805 , 1.82307356, 1.81227444, 1.80010526, + 2.00969673, 1.94934976, 1.93507569, 1.92751947, 2.12643006, + 2.07491163, 2.0633099 , 2.04994315]), + 'std': np.array([1.00000000e-09, 4.08285886e-01, 5.37121096e-01, 8.71946563e-01, + 4.12282634e-01, 6.11762415e-01, 9.06978755e-01, 1.69966614e+00, + 2.00191040e+00, 2.13490451e+00, 2.21659421e+00, 2.26303688e+00, + 2.23270611e+00, 2.35017790e+00, 2.36805165e+00, 2.37394797e+00, + 2.29627659e+00, 2.35602394e+00, 2.34603797e+00, 2.33723647e+00, + 2.32351897e+00, 2.35254582e+00, 2.31999721e+00, 2.31027793e+00, + 2.31767244e+00, 2.32762186e+00, 2.30587210e+00, 2.30070455e+00, + 9.13129107e-01, 9.60813104e-01, 1.04666124e+00, 1.13543873e+00, + 1.22065216e+00, 1.19813460e+00, 1.32304847e+00, 1.36258205e+00, + 1.39395817e+00, 1.22366288e+00, 1.33356313e+00, 1.36087143e+00, + 1.38704473e+00, 1.22117060e+00, 1.31114679e+00, 1.33057436e+00, + 1.35115518e+00, 1.20316279e+00, 1.27333047e+00, 1.28827426e+00, + 1.30591601e+00, 1.00000000e-09, 2.24756100e-01, 5.35069110e-01, + 7.23929770e-01, 2.38824569e-01, 5.24184193e-01, 1.05334019e+00, + 1.96148946e+00, 2.20731111e+00, 2.26955660e+00, 2.34286927e+00, + 2.38498516e+00, 2.27035892e+00, 2.40544254e+00, 2.47068244e+00, + 2.52236063e+00, 2.36167783e+00, 2.47597575e+00, 2.52770214e+00, + 2.56720823e+00, 2.44301903e+00, 2.54469073e+00, 2.55992396e+00, + 2.59643755e+00, 2.50673760e+00, 2.57147923e+00, 2.58813160e+00, + 2.62636639e+00, 1.12222273e+00, 1.15805069e+00, 1.23323303e+00, + 1.27015104e+00, 1.32717458e+00, 1.25752732e+00, 1.33247522e+00, + 1.37077502e+00, 1.41721512e+00, 1.29912033e+00, 1.39896505e+00, + 1.43278090e+00, 1.46709798e+00, 1.37071898e+00, 1.45249912e+00, + 1.47480597e+00, 1.48532896e+00, 1.41428911e+00, 1.48859498e+00, + 1.48931973e+00, 1.50199060e+00]) + +} + +SPEAKERS_CONFIG['Stanley_McChrystal'] = { + 'median': np.ones(98) * 112, + 'scale_factor': 1, + 'mean': np.array([ 0. 
, -1.20970112, -1.78184855, -1.93372151, 1.39422362, + 2.01909743, 1.03556081, 0.92900821, 0.79465224, 0.60323883, + 0.42230403, 0.26172113, 0.62343882, 0.38623023, 0.23477015, + 0.10702492, 0.63293302, 0.36290843, 0.2094316 , 0.09788613, + 0.63374358, 0.38669451, 0.2599326 , 0.17109027, 0.63419738, + 0.44206407, 0.35158083, 0.2775307 , -1.95703567, -1.9335366 , + -1.89316227, -1.86755643, -1.84896573, -2.0412647 , -1.9840291 , + -1.89099935, -1.80674288, -2.07689652, -1.96354895, -1.83920887, + -1.73626386, -2.06131385, -1.94172925, -1.82392343, -1.72696778, + -2.02438723, -1.92271936, -1.83659787, -1.7586002 , 0. , + 0.85286796, 3.21208821, 3.1389567 , 0.71733128, 3.10551972, + 3.33381399, 3.34433628, 3.11881779, 2.96770498, 2.93275188, + 2.90898097, 3.14375969, 3.15930581, 3.15348179, 3.14225303, + 3.3489085 , 3.35445151, 3.32216601, 3.28633532, 3.50968649, + 3.51025893, 3.46797721, 3.42073258, 3.63848607, 3.63587839, + 3.60646414, 3.56999241, 3.10259301, 2.8681846 , 2.6774324 , + 2.58518526, 2.51778432, 2.74871926, 2.69924025, 2.68426765, + 2.67777918, 2.93371521, 2.89605158, 2.87105263, 2.84927583, + 3.09809173, 3.06464304, 3.03411583, 3.00795203, 3.24128544, + 3.21603032, 3.18817925, 3.15788871]), + 'std': np.array([1.00000000e-09, 6.49262945e-01, 1.15265273e+00, 1.52325942e+00, + 5.00654975e-01, 9.07370918e-01, 1.45175969e+00, 1.54025576e+00, + 1.61546518e+00, 1.72830315e+00, 1.84682229e+00, 1.95634381e+00, + 1.95036251e+00, 2.09267770e+00, 2.11774404e+00, 2.12942965e+00, + 1.95890000e+00, 2.06536535e+00, 2.05948187e+00, 2.04472432e+00, + 1.91815272e+00, 1.99793557e+00, 1.98195550e+00, 1.96677853e+00, + 1.86244242e+00, 1.91669392e+00, 1.90513448e+00, 1.89551484e+00, + 1.62394582e+00, 1.70430986e+00, 1.83799118e+00, 1.99389540e+00, + 2.13820744e+00, 2.00990809e+00, 2.21566844e+00, 2.30700221e+00, + 2.37657097e+00, 2.04624780e+00, 2.23858620e+00, 2.30560753e+00, + 2.35226104e+00, 2.03811007e+00, 2.20382601e+00, 2.24948351e+00, + 2.28412466e+00, 2.01050292e+00, 2.13725460e+00, 2.17221742e+00, + 2.19851700e+00, 1.00000000e-09, 1.68129592e-01, 5.63129923e-01, + 1.13407635e+00, 1.73321962e-01, 5.29865059e-01, 8.95386414e-01, + 9.90962575e-01, 1.01624645e+00, 1.07395111e+00, 1.15287721e+00, + 1.23239561e+00, 1.21158938e+00, 1.30583031e+00, 1.32448904e+00, + 1.33509263e+00, 1.24605084e+00, 1.31970168e+00, 1.31535178e+00, + 1.30455427e+00, 1.24163297e+00, 1.30388283e+00, 1.29229657e+00, + 1.28024563e+00, 1.21614511e+00, 1.26163158e+00, 1.25945481e+00, + 1.25456990e+00, 1.26279445e+00, 1.30584113e+00, 1.36841117e+00, + 1.44374881e+00, 1.52274841e+00, 1.50400765e+00, 1.60383518e+00, + 1.63139144e+00, 1.65014039e+00, 1.54531024e+00, 1.63375079e+00, + 1.64277599e+00, 1.64053881e+00, 1.55372427e+00, 1.63022147e+00, + 1.62872130e+00, 1.61127724e+00, 1.53556795e+00, 1.59828660e+00, + 1.60319835e+00, 1.59723441e+00]) + +} + +SPEAKERS_CONFIG['Seema_Bansal'] = { + 'median': np.ones(98) * 112, + 'scale_factor': 1, + 'mean': np.array([ 0. 
, -1.40953135, -1.90851615, -1.1531883 , 1.44919938, + 1.993433 , 1.47051017, 1.39639792, 1.32765061, 1.21316457, + 1.08938823, 0.98905291, 1.27548386, 1.09072056, 0.96863478, + 0.85344033, 1.25359116, 1.0307414 , 0.887757 , 0.77407451, + 1.21487216, 0.99988077, 0.86742446, 0.75709108, 1.17396474, + 0.99005752, 0.88815338, 0.79490474, -1.05741282, -0.93664287, + -0.75322569, -0.57658209, -0.42453932, -0.78240461, -0.54364271, + -0.40820871, -0.29851198, -0.79029464, -0.52799201, -0.38177942, + -0.26260613, -0.79234516, -0.5432291 , -0.41165139, -0.29490473, + -0.79046124, -0.58943856, -0.48262306, -0.38136404, 0. , + 0.61506936, 3.09323912, 2.89467654, 0.54501874, 2.94656141, + 2.93480347, 2.91236219, 2.72592696, 2.5981872 , 2.55901417, + 2.54632898, 2.74062642, 2.75860559, 2.77280026, 2.79269327, + 2.91267159, 2.93136204, 2.93232843, 2.92200398, 3.04722002, + 3.05892215, 3.04311074, 3.01863797, 3.15415044, 3.15434426, + 3.13635386, 3.11663994, 2.85637803, 2.66266767, 2.5632497 , + 2.56413399, 2.59042702, 2.66854899, 2.7294872 , 2.7615801 , + 2.79434248, 2.8482255 , 2.91126703, 2.93987713, 2.94857051, + 2.98922535, 3.04266765, 3.04979868, 3.04328226, 3.1035342 , + 3.13874592, 3.13732136, 3.12964445]), + 'std': np.array([1.00000000e-09, 2.40432657e-01, 3.81861330e-01, 6.68889073e-01, + 2.41572281e-01, 3.96190367e-01, 8.24438678e-01, 9.61948467e-01, + 1.04057227e+00, 1.16163324e+00, 1.28583940e+00, 1.40140262e+00, + 1.28707661e+00, 1.44135646e+00, 1.51372195e+00, 1.57362229e+00, + 1.29550974e+00, 1.43293224e+00, 1.49490956e+00, 1.53523475e+00, + 1.26977702e+00, 1.38949159e+00, 1.43642351e+00, 1.47502901e+00, + 1.23261071e+00, 1.33361496e+00, 1.36452820e+00, 1.39302142e+00, + 7.68801239e-01, 8.49738926e-01, 9.67093652e-01, 1.08972209e+00, + 1.19664286e+00, 1.08419133e+00, 1.21314377e+00, 1.25873963e+00, + 1.28698575e+00, 1.07886101e+00, 1.18819848e+00, 1.22021144e+00, + 1.24127938e+00, 1.04670053e+00, 1.13956001e+00, 1.16108672e+00, + 1.18151171e+00, 1.00467702e+00, 1.07310024e+00, 1.09010886e+00, + 1.11366143e+00, 1.00000000e-09, 1.33497211e-01, 3.49633532e-01, + 4.94801421e-01, 1.56663438e-01, 3.38162158e-01, 5.89630226e-01, + 7.95568691e-01, 8.05055045e-01, 8.51851592e-01, 8.96500886e-01, + 9.48170278e-01, 9.10056973e-01, 9.77295468e-01, 1.01845965e+00, + 1.05019127e+00, 9.58670046e-01, 1.00677398e+00, 1.03531330e+00, + 1.03949634e+00, 9.80801853e-01, 1.02083085e+00, 1.03000075e+00, + 1.02936744e+00, 9.81425929e-01, 1.01548932e+00, 1.01740709e+00, + 1.01944649e+00, 7.07490542e-01, 7.31150849e-01, 7.40099072e-01, + 7.72145384e-01, 8.24272891e-01, 8.21482204e-01, 8.65735170e-01, + 8.88676710e-01, 9.09200998e-01, 8.49419159e-01, 8.79509955e-01, + 9.02822212e-01, 9.20494048e-01, 8.55287212e-01, 8.88672975e-01, + 9.00292197e-01, 9.06352837e-01, 8.54730604e-01, 8.94125969e-01, + 8.97500743e-01, 9.03516486e-01]) + +} + +SPEAKERS_CONFIG['Tshering_Tobgay'] = { + 'median': np.ones(98) * 112, + 'scale_factor': 1, + 'mean': np.array([ 0. 
, -1.5426154 , -2.31055851, -1.78168447, 1.64907599, + 2.29680892, 1.61073857, 1.42986363, 1.18693051, 0.98660171, + 0.85957451, 0.74577074, 1.17117359, 0.98282705, 0.85049575, + 0.72490218, 1.24307146, 1.02107376, 0.87756626, 0.75344788, + 1.27921894, 1.0685921 , 0.93623641, 0.82077189, 1.30282797, + 1.12947448, 1.03005593, 0.93673451, -1.81556441, -1.70020793, + -1.56460705, -1.45253646, -1.34691992, -1.74461516, -1.55995082, + -1.43041308, -1.31787137, -1.79063659, -1.55537355, -1.41291994, + -1.29170835, -1.80852066, -1.5812527 , -1.44947227, -1.34318587, + -1.8058097 , -1.62763156, -1.52367865, -1.43320964, 0. , + 0.85148611, 3.19562375, 3.72047072, 0.70480475, 3.24204312, + 4.3068104 , 4.1860717 , 3.96398473, 3.867642 , 3.92277685, + 3.96967226, 4.03891626, 4.17569858, 4.20448118, 4.2188033 , + 4.2038805 , 4.34045895, 4.33994614, 4.3314275 , 4.33551951, + 4.44997151, 4.43376398, 4.41530341, 4.45266256, 4.53873974, + 4.52403242, 4.50375821, 3.67509985, 3.4741722 , 3.36017855, + 3.37765579, 3.41041793, 3.47782993, 3.55789184, 3.59480922, + 3.62360585, 3.69043964, 3.76242246, 3.76632635, 3.76283129, + 3.85404474, 3.90182728, 3.88795795, 3.87706907, 3.99157181, + 4.01390115, 3.99955427, 3.98475352]), + 'std': np.array([1.00000000e-09, 6.29667454e-01, 9.89894273e-01, 1.39412690e+00, + 5.72887526e-01, 8.87518427e-01, 1.35181317e+00, 1.65282590e+00, + 1.77265651e+00, 1.86443568e+00, 1.93362752e+00, 2.01223538e+00, + 2.01260581e+00, 2.10649119e+00, 2.14228354e+00, 2.17134311e+00, + 2.04381182e+00, 2.11418754e+00, 2.13057464e+00, 2.13954710e+00, + 2.04381299e+00, 2.10130644e+00, 2.10473152e+00, 2.10798706e+00, + 2.02679342e+00, 2.06511013e+00, 2.06457024e+00, 2.06565255e+00, + 1.47203047e+00, 1.54125198e+00, 1.65902661e+00, 1.79999631e+00, + 1.94209778e+00, 1.83044148e+00, 2.01263617e+00, 2.05557428e+00, + 2.07826273e+00, 1.84389937e+00, 1.99992039e+00, 2.01676671e+00, + 2.01714896e+00, 1.81592662e+00, 1.94881638e+00, 1.95016457e+00, + 1.94064188e+00, 1.77283836e+00, 1.87492038e+00, 1.87631139e+00, + 1.87344791e+00, 1.00000000e-09, 2.40368011e-01, 8.19167921e-01, + 1.50955020e+00, 2.09866833e-01, 7.96096084e-01, 1.56895385e+00, + 2.12358137e+00, 2.35018353e+00, 2.52554899e+00, 2.63411732e+00, + 2.75257932e+00, 2.56750224e+00, 2.69839674e+00, 2.77831130e+00, + 2.84698410e+00, 2.59480877e+00, 2.69436907e+00, 2.76153510e+00, + 2.82755178e+00, 2.59859759e+00, 2.69057305e+00, 2.74731597e+00, + 2.80722435e+00, 2.58245382e+00, 2.64833506e+00, 2.69239175e+00, + 2.74168237e+00, 1.87005704e+00, 1.92607763e+00, 2.02284846e+00, + 2.11035361e+00, 2.22433129e+00, 2.10774473e+00, 2.25620209e+00, + 2.31478256e+00, 2.35546283e+00, 2.11815506e+00, 2.26387858e+00, + 2.32062386e+00, 2.35587353e+00, 2.13386470e+00, 2.27017833e+00, + 2.31148031e+00, 2.32671453e+00, 2.13707704e+00, 2.26067909e+00, + 2.29110446e+00, 2.30383057e+00]) + +} + + +SPEAKERS_CONFIG['molly_winter'] = { + 'median': np.ones(98) * 112, + 'scale_factor': 1, + 'mean': np.array([ 0. 
, -1.40417455, -1.93915965, -2.05325684, 1.37072822, + 1.75503212, 1.64890912, 1.62898026, 1.61529551, 1.57606879, + 1.53174505, 1.49379764, 1.73160648, 1.66843796, 1.59464609, + 1.51958521, 1.72460027, 1.61225515, 1.51997087, 1.44381198, + 1.68049785, 1.56653305, 1.47839914, 1.40486713, 1.62913535, + 1.54168091, 1.470533 , 1.40991999, -2.06556883, -2.06104822, + -2.04168945, -2.03495656, -2.02745634, -2.21403956, -2.19367452, + -2.14885706, -2.10818925, -2.2305437 , -2.16783652, -2.10657543, + -2.0526853 , -2.20319322, -2.13547311, -2.07614425, -2.02157022, + -2.1628595 , -2.10670246, -2.06008777, -2.0197715 , 0. , + 0.59925777, 2.83315462, 2.26691744, 0.56002944, 2.85605978, + 2.497442 , 2.45031315, 2.25846407, 2.12119046, 2.08862409, + 2.08203738, 2.22391203, 2.22765974, 2.22629016, 2.22243306, + 2.37273701, 2.36432486, 2.33861217, 2.31751521, 2.49075797, + 2.47221082, 2.43265099, 2.4029547 , 2.58964675, 2.56323913, + 2.52757512, 2.50045151, 2.17719543, 1.9727938 , 1.78831173, + 1.69150947, 1.63451964, 1.81613529, 1.74854777, 1.7283022 , + 1.71450909, 1.93796868, 1.87753457, 1.84878146, 1.82512106, + 2.05230744, 1.99368792, 1.95919174, 1.93252213, 2.15735083, + 2.10447618, 2.07407801, 2.05188586]), + 'std': np.array([1.00000000e-09, 1.77210102e-01, 6.16759879e-01, 1.14568304e+00, + 1.41893505e-01, 4.67621652e-01, 9.19436155e-01, 1.01205502e+00, + 1.09217049e+00, 1.22211208e+00, 1.34863925e+00, 1.47089480e+00, + 1.29588885e+00, 1.45213840e+00, 1.54153015e+00, 1.61852207e+00, + 1.30685607e+00, 1.45794103e+00, 1.53150642e+00, 1.58872360e+00, + 1.29006892e+00, 1.42310414e+00, 1.47713738e+00, 1.51869987e+00, + 1.26865214e+00, 1.37082563e+00, 1.41392846e+00, 1.44177077e+00, + 1.24907833e+00, 1.32546870e+00, 1.45563508e+00, 1.58649984e+00, + 1.70378807e+00, 1.53336475e+00, 1.69966754e+00, 1.79795421e+00, + 1.87362518e+00, 1.55728466e+00, 1.72177080e+00, 1.80624456e+00, + 1.87230210e+00, 1.55647078e+00, 1.70262794e+00, 1.77069326e+00, + 1.82885038e+00, 1.54733533e+00, 1.66035573e+00, 1.71411779e+00, + 1.75613996e+00, 1.00000000e-09, 1.19070227e-01, 5.93853115e-01, + 9.61506585e-01, 1.16451612e-01, 4.76599584e-01, 7.35670754e-01, + 7.99402145e-01, 8.28434742e-01, 8.86274968e-01, 9.59245398e-01, + 1.03640790e+00, 9.90457858e-01, 1.07613285e+00, 1.10641709e+00, + 1.12709832e+00, 1.00242792e+00, 1.06816731e+00, 1.08105868e+00, + 1.09318000e+00, 9.90869272e-01, 1.03941128e+00, 1.04598950e+00, + 1.05211029e+00, 9.67483332e-01, 1.00097688e+00, 1.00679966e+00, + 1.01222696e+00, 1.02475249e+00, 1.04372773e+00, 1.10235950e+00, + 1.17097429e+00, 1.25204050e+00, 1.20267931e+00, 1.30213758e+00, + 1.34428386e+00, 1.37536571e+00, 1.23544817e+00, 1.32515264e+00, + 1.35093169e+00, 1.36774448e+00, 1.24043670e+00, 1.31546474e+00, + 1.32734204e+00, 1.33299042e+00, 1.22733520e+00, 1.28411209e+00, + 1.29080733e+00, 1.29215459e+00]) +} + +SPEAKERS_CONFIG['ALL'] = { + 'median': np.ones(98) * 112, + 'scale_factor': 1, + 'mean': np.array([ 0. 
, -1.30300177, -1.86055032, -1.57118029, 1.3657266 , + 1.92831773, 1.27756316, 1.16861636, 1.07933715, 0.9506417 , + 0.82515075, 0.71723342, 0.98796002, 0.80038158, 0.67221937, + 0.56251367, 0.98125758, 0.75949434, 0.62040493, 0.51636528, + 0.96027632, 0.75239721, 0.62975113, 0.53927769, 0.937988 , + 0.76976597, 0.68029213, 0.60453242, -1.57581202, -1.53029104, + -1.44812959, -1.36786496, -1.30067461, -1.55515339, -1.43382027, + -1.32774391, -1.23772878, -1.5686388 , -1.40122445, -1.27958344, + -1.18189403, -1.54969714, -1.38632245, -1.27630342, -1.18691285, + -1.51919196, -1.38642254, -1.30373232, -1.23290295, 0. , + 0.71123098, 3.02674416, 2.91699844, 0.64559599, 2.97558319, + 3.0200536 , 2.93745739, 2.72236932, 2.56640152, 2.51889404, + 2.48098966, 2.71813759, 2.71314216, 2.69923703, 2.68668115, + 2.89014913, 2.88411348, 2.8491349 , 2.81780958, 3.02629419, + 3.01506254, 2.97198885, 2.9293096 , 3.13306825, 3.11997394, + 3.09019367, 3.05718596, 2.82000627, 2.59470968, 2.42605315, + 2.3668938 , 2.33373695, 2.51350406, 2.49060967, 2.48313634, + 2.47856824, 2.68452928, 2.66356618, 2.64253278, 2.61957967, + 2.82552995, 2.80440706, 2.7745475 , 2.74672361, 2.94719774, + 2.92540835, 2.89896933, 2.86841935]), + 'std': np.array([1.00000000e-09, 5.94853211e-01, 9.39004804e-01, 1.30696169e+00, + 5.13600409e-01, 8.33428091e-01, 1.24559872e+00, 1.39153990e+00, + 1.46482703e+00, 1.57215549e+00, 1.68515334e+00, 1.79094924e+00, + 1.74087731e+00, 1.87654111e+00, 1.92098409e+00, 1.95011993e+00, + 1.75183954e+00, 1.85908747e+00, 1.88118825e+00, 1.89038185e+00, + 1.72483854e+00, 1.80798182e+00, 1.81824030e+00, 1.82156299e+00, + 1.68453675e+00, 1.74706163e+00, 1.74940825e+00, 1.75118403e+00, + 1.39398302e+00, 1.47352232e+00, 1.60205166e+00, 1.74801788e+00, + 1.88070326e+00, 1.75880872e+00, 1.94173513e+00, 2.01138420e+00, + 2.05996973e+00, 1.78345552e+00, 1.94760222e+00, 1.99331582e+00, + 2.02361450e+00, 1.76915133e+00, 1.90813772e+00, 1.93698920e+00, + 1.95971143e+00, 1.73766796e+00, 1.84338938e+00, 1.86596427e+00, + 1.88428898e+00, 1.00000000e-09, 2.01288803e-01, 5.61500604e-01, + 9.87625296e-01, 1.79270570e-01, 5.14485413e-01, 8.81159823e-01, + 1.19507623e+00, 1.22634755e+00, 1.28659387e+00, 1.33841801e+00, + 1.40840380e+00, 1.36690551e+00, 1.44821245e+00, 1.48754427e+00, + 1.51335860e+00, 1.41121023e+00, 1.46198667e+00, 1.49705702e+00, + 1.50381832e+00, 1.42659163e+00, 1.47976093e+00, 1.50377357e+00, + 1.51654021e+00, 1.44594775e+00, 1.48942057e+00, 1.49463393e+00, + 1.50021580e+00, 1.33082047e+00, 1.36775159e+00, 1.42347223e+00, + 1.48050462e+00, 1.54522074e+00, 1.52570205e+00, 1.61165322e+00, + 1.63691128e+00, 1.65428311e+00, 1.56326930e+00, 1.63907141e+00, + 1.65163394e+00, 1.65592740e+00, 1.57865120e+00, 1.64335108e+00, + 1.64701618e+00, 1.63824454e+00, 1.57316675e+00, 1.63089944e+00, + 1.63366642e+00, 1.63141875e+00]) + +} diff --git a/data_utils/dataloader_torch.py b/data_utils/dataloader_torch.py new file mode 100644 index 0000000000000000000000000000000000000000..dc0bf81c1ccc86580fdd013c0b3e81fae190a65d --- /dev/null +++ b/data_utils/dataloader_torch.py @@ -0,0 +1,279 @@ +import sys +import os +sys.path.append(os.getcwd()) +import os +from tqdm import tqdm +from data_utils.utils import * +import torch.utils.data as data +from data_utils.mesh_dataset import SmplxDataset +from transformers import Wav2Vec2Processor + + +class MultiVidData(): + def __init__(self, + data_root, + speakers, + split='train', + limbscaling=False, + normalization=False, + norm_method='new', + split_trans_zero=False, + 
num_frames=25, + num_pre_frames=25, + num_generate_length=None, + aud_feat_win_size=None, + aud_feat_dim=64, + feat_method='mel_spec', + context_info=False, + smplx=False, + audio_sr=16000, + convert_to_6d=False, + expression=False, + config=None + ): + self.data_root = data_root + self.speakers = speakers + self.split = split + if split == 'pre': + self.split = 'train' + self.norm_method=norm_method + self.normalization = normalization + self.limbscaling = limbscaling + self.convert_to_6d = convert_to_6d + self.num_frames=num_frames + self.num_pre_frames=num_pre_frames + if num_generate_length is None: + self.num_generate_length = num_frames + else: + self.num_generate_length = num_generate_length + self.split_trans_zero=split_trans_zero + + dataset = SmplxDataset + + if self.split_trans_zero: + self.trans_dataset_list = [] + self.zero_dataset_list = [] + else: + self.all_dataset_list = [] + self.dataset={} + self.complete_data=[] + self.config=config + load_mode=self.config.dataset_load_mode + + ######################load with pickle file + if load_mode=='pickle': + import pickle + import subprocess + + # store_file_path='/tmp/store.pkl' + # cp /is/cluster/scratch/hyi/ExpressiveBody/SMPLifyX4/scripts/store.pkl /tmp/store.pkl + # subprocess.run(f'cp /is/cluster/scratch/hyi/ExpressiveBody/SMPLifyX4/scripts/store.pkl {store_file_path}',shell=True) + + # f = open(self.config.store_file_path, 'rb+') + f = open(self.split+config.Data.pklname, 'rb+') + self.dataset=pickle.load(f) + f.close() + for key in self.dataset: + self.complete_data.append(self.dataset[key].complete_data) + ######################load with pickle file + + ######################load with a csv file + elif load_mode=='csv': + + # imported from one of my code folders; to be integrated and cleaned up later + try: + sys.path.append(self.config.config_root_path) + from config import config_path + from csv_parser import csv_parse + + except ImportError as e: + print(f'err: {e}') + raise ImportError('config root path error...') + + + for speaker_name in self.speakers: + # df_intervals=pd.read_csv(self.config.voca_csv_file_path) + df_intervals=None + df_intervals=df_intervals[df_intervals['speaker']==speaker_name] + df_intervals = df_intervals[df_intervals['dataset'] == self.split] + + print(f'speaker {speaker_name} train interval length: {len(df_intervals)}') + for iter_index, (_, interval) in tqdm( + (enumerate(df_intervals.iterrows())),desc=f'load {speaker_name}' + ): + + ( + interval_index, + interval_speaker, + interval_video_fn, + interval_id, + + start_time, + end_time, + duration_time, + start_time_10, + over_flow_flag, + short_dur_flag, + + big_video_dir, + small_video_dir_name, + speaker_video_path, + + voca_basename, + json_basename, + wav_basename, + voca_top_clip_path, + voca_json_clip_path, + voca_wav_clip_path, + + audio_output_fn, + image_output_path, + pifpaf_output_path, + mp_output_path, + op_output_path, + deca_output_path, + pixie_output_path, + cam_output_path, + ours_output_path, + merge_output_path, + multi_output_path, + gt_output_path, + ours_images_path, + pkl_fil_path, + )=csv_parse(interval) + + if not os.path.exists(pkl_fil_path) or not os.path.exists(audio_output_fn): + continue + + key=f'{interval_video_fn}/{small_video_dir_name}' + self.dataset[key] = dataset( + data_root=pkl_fil_path, + speaker=speaker_name, + audio_fn=audio_output_fn, + audio_sr=audio_sr, + fps=num_frames, + feat_method=feat_method, + audio_feat_dim=aud_feat_dim, + train=(self.split == 'train'), + load_all=True, + split_trans_zero=self.split_trans_zero, +
limbscaling=self.limbscaling, + num_frames=self.num_frames, + num_pre_frames=self.num_pre_frames, + num_generate_length=self.num_generate_length, + audio_feat_win_size=aud_feat_win_size, + context_info=context_info, + convert_to_6d=convert_to_6d, + expression=expression, + config=self.config + ) + self.complete_data.append(self.dataset[key].complete_data) + ######################load with a csv file + + ######################origin load method + elif load_mode=='json': + + # if self.split == 'train': + # import pickle + # f = open('store.pkl', 'rb+') + # self.dataset=pickle.load(f) + # f.close() + # for key in self.dataset: + # self.complete_data.append(self.dataset[key].complete_data) + # else:https://pytorch-tutorial-assets.s3.amazonaws.com/VOiCES_devkit/source-16k/train/sp0307/Lab41-SRI-VOiCES-src-sp0307-ch127535-sg0042.wav + # if config.Model.model_type == 'face': + am = Wav2Vec2Processor.from_pretrained("vitouphy/wav2vec2-xls-r-300m-phoneme") + am_sr = 16000 + # else: + # am, am_sr = None, None + for speaker_name in self.speakers: + speaker_root = os.path.join(self.data_root, speaker_name) + + videos=[v for v in os.listdir(speaker_root) ] + print(videos) + + haode = huaide = 0 + + for vid in tqdm(videos, desc="Processing training data of {}......".format(speaker_name)): + source_vid=vid + # vid_pth=os.path.join(speaker_root, source_vid, 'images/half', self.split) + vid_pth = os.path.join(speaker_root, source_vid, self.split) + if smplx == 'pose': + seqs = [s for s in os.listdir(vid_pth) if (s.startswith('clip'))] + else: + try: + seqs = [s for s in os.listdir(vid_pth)] + except: + continue + + for s in seqs: + seq_root=os.path.join(vid_pth, s) + key = seq_root # correspond to clip****** + audio_fname = os.path.join(speaker_root, source_vid, self.split, s, '%s.wav' % (s)) + motion_fname = os.path.join(speaker_root, source_vid, self.split, s, '%s.pkl' % (s)) + if not os.path.isfile(audio_fname) or not os.path.isfile(motion_fname): + huaide = huaide + 1 + continue + + self.dataset[key]=dataset( + data_root=seq_root, + speaker=speaker_name, + motion_fn=motion_fname, + audio_fn=audio_fname, + audio_sr=audio_sr, + fps=num_frames, + feat_method=feat_method, + audio_feat_dim=aud_feat_dim, + train=(self.split=='train'), + load_all=True, + split_trans_zero=self.split_trans_zero, + limbscaling=self.limbscaling, + num_frames=self.num_frames, + num_pre_frames=self.num_pre_frames, + num_generate_length=self.num_generate_length, + audio_feat_win_size=aud_feat_win_size, + context_info=context_info, + convert_to_6d=convert_to_6d, + expression=expression, + config=self.config, + am=am, + am_sr=am_sr, + whole_video=config.Data.whole_video + ) + self.complete_data.append(self.dataset[key].complete_data) + haode = haode + 1 + print("huaide:{}, haode:{}".format(huaide, haode)) + import pickle + + f = open(self.split+config.Data.pklname, 'wb') + pickle.dump(self.dataset, f) + f.close() + ######################origin load method + + self.complete_data=np.concatenate(self.complete_data, axis=0) + + # assert self.complete_data.shape[-1] == (12+21+21)*2 + self.normalize_stats = {} + + self.data_mean = None + self.data_std = None + + def get_dataset(self): + self.normalize_stats['mean'] = self.data_mean + self.normalize_stats['std'] = self.data_std + + for key in list(self.dataset.keys()): + if self.dataset[key].complete_data.shape[0] < self.num_generate_length: + continue + self.dataset[key].num_generate_length = self.num_generate_length + self.dataset[key].get_dataset(self.normalization, self.normalize_stats, 
self.split) + self.all_dataset_list.append(self.dataset[key].all_dataset) + + if self.split_trans_zero: + self.trans_dataset = data.ConcatDataset(self.trans_dataset_list) + self.zero_dataset = data.ConcatDataset(self.zero_dataset_list) + else: + self.all_dataset = data.ConcatDataset(self.all_dataset_list) + + + diff --git a/data_utils/dataset_preprocess.py b/data_utils/dataset_preprocess.py new file mode 100644 index 0000000000000000000000000000000000000000..72fca71efa0e86d08b255d1250622f3e46d59a35 --- /dev/null +++ b/data_utils/dataset_preprocess.py @@ -0,0 +1,170 @@ +import os +import pickle +from tqdm import tqdm +import shutil +import torch +import numpy as np +import librosa +import random + +speakers = ['seth', 'conan', 'oliver', 'chemistry'] +data_root = "../ExpressiveWholeBodyDatasetv1.0/" +split = 'train' + + + +def split_list(full_list,shuffle=False,ratio=0.2): + n_total = len(full_list) + offset_0 = int(n_total * ratio) + offset_1 = int(n_total * ratio * 2) + if n_total==0 or offset_1<1: + return [], [], full_list + if shuffle: + random.shuffle(full_list) + sublist_0 = full_list[:offset_0] + sublist_1 = full_list[offset_0:offset_1] + sublist_2 = full_list[offset_1:] + return sublist_0, sublist_1, sublist_2 + + +def moveto(list, file): + for f in list: + before, after = '/'.join(f.split('/')[:-1]), f.split('/')[-1] + new_path = os.path.join(before, file) + new_path = os.path.join(new_path, after) + # os.makedirs(new_path) + # os.path.isdir(new_path) + # shutil.move(f, new_path) + + # copy the sequence into the new split directory + shutil.copytree(f, new_path) + # remove the original copy from the train directory + shutil.rmtree(f) + return None + + +def read_pkl(data): + betas = np.array(data['betas']) + + jaw_pose = np.array(data['jaw_pose']) + leye_pose = np.array(data['leye_pose']) + reye_pose = np.array(data['reye_pose']) + global_orient = np.array(data['global_orient']).squeeze() + body_pose = np.array(data['body_pose_axis']) + left_hand_pose = np.array(data['left_hand_pose']) + right_hand_pose = np.array(data['right_hand_pose']) + + full_body = np.concatenate( + (jaw_pose, leye_pose, reye_pose, global_orient, body_pose, left_hand_pose, right_hand_pose), axis=1) + + expression = np.array(data['expression']) + full_body = np.concatenate((full_body, expression), axis=1) + + if (full_body.shape[0] < 90) or (torch.isnan(torch.from_numpy(full_body)).sum() > 0): + return 1 + else: + return 0 + + +for speaker_name in speakers: + speaker_root = os.path.join(data_root, speaker_name) + + videos = [v for v in os.listdir(speaker_root)] + print(videos) + + haode = huaide = 0 + total_seqs = [] + + for vid in tqdm(videos, desc="Processing training data of {}......".format(speaker_name)): + # for vid in videos: + source_vid = vid + vid_pth = os.path.join(speaker_root, source_vid) + # vid_pth = os.path.join(speaker_root, source_vid, 'images/half', split) + t = os.path.join(speaker_root, source_vid, 'test') + v = os.path.join(speaker_root, source_vid, 'val') + + # if os.path.exists(t): + # shutil.rmtree(t) + # if os.path.exists(v): + # shutil.rmtree(v) + try: + seqs = [s for s in os.listdir(vid_pth)] + except: + continue + # if len(seqs) == 0: + # shutil.rmtree(os.path.join(speaker_root, source_vid)) + # None + for s in seqs: + quality = 0 + total_seqs.append(os.path.join(vid_pth,s)) + seq_root = os.path.join(vid_pth, s) + key = seq_root # correspond to clip****** + audio_fname = os.path.join(speaker_root, source_vid, s, '%s.wav' % (s)) + + # delete the data without audio or the audio file could not be read + if os.path.isfile(audio_fname): + try: + audio = 
librosa.load(audio_fname) + except: + # print(key) + shutil.rmtree(key) + huaide = huaide + 1 + continue + else: + huaide = huaide + 1 + # print(key) + shutil.rmtree(key) + continue + + # check motion file + motion_fname = os.path.join(speaker_root, source_vid, s, '%s.pkl' % (s)) + try: + f = open(motion_fname, 'rb+') + except: + shutil.rmtree(key) + huaide = huaide + 1 + continue + + data = pickle.load(f) + w = read_pkl(data) + f.close() + quality = quality + w + + if w == 1: + shutil.rmtree(key) + # print(key) + huaide = huaide + 1 + continue + + haode = haode + 1 + + print("huaide:{}, haode:{}, total_seqs:{}".format(huaide, haode, total_seqs.__len__())) + +for speaker_name in speakers: + speaker_root = os.path.join(data_root, speaker_name) + + videos = [v for v in os.listdir(speaker_root)] + print(videos) + + haode = huaide = 0 + total_seqs = [] + + for vid in tqdm(videos, desc="Processing training data of {}......".format(speaker_name)): + # for vid in videos: + source_vid = vid + vid_pth = os.path.join(speaker_root, source_vid) + try: + seqs = [s for s in os.listdir(vid_pth)] + except: + continue + for s in seqs: + quality = 0 + total_seqs.append(os.path.join(vid_pth, s)) + print("total_seqs:{}".format(total_seqs.__len__())) + # split the dataset + test_list, val_list, train_list = split_list(total_seqs, True, 0.1) + print(len(test_list), len(val_list), len(train_list)) + moveto(train_list, 'train') + moveto(test_list, 'test') + moveto(val_list, 'val') + diff --git a/data_utils/get_j.py b/data_utils/get_j.py new file mode 100644 index 0000000000000000000000000000000000000000..39bdd67459c3b788beacba87ce8dd496877d4d71 --- /dev/null +++ b/data_utils/get_j.py @@ -0,0 +1,51 @@ +import torch + + +def to3d(poses, config): + if config.Data.pose.convert_to_6d: + if config.Data.pose.expression: + poses_exp = poses[:, -100:] + poses = poses[:, :-100] + + poses = poses.reshape(poses.shape[0], -1, 5) + sin, cos = poses[:, :, 3], poses[:, :, 4] + pose_angle = torch.atan2(sin, cos) + poses = (poses[:, :, :3] * pose_angle.unsqueeze(dim=-1)).reshape(poses.shape[0], -1) + + if config.Data.pose.expression: + poses = torch.cat([poses, poses_exp], dim=-1) + return poses + + +def get_joint(smplx_model, betas, pred): + joint = smplx_model(betas=betas.repeat(pred.shape[0], 1), + expression=pred[:, 165:265], + jaw_pose=pred[:, 0:3], + leye_pose=pred[:, 3:6], + reye_pose=pred[:, 6:9], + global_orient=pred[:, 9:12], + body_pose=pred[:, 12:75], + left_hand_pose=pred[:, 75:120], + right_hand_pose=pred[:, 120:165], + return_verts=True)['joints'] + return joint + + +def get_joints(smplx_model, betas, pred): + if len(pred.shape) == 3: + B = pred.shape[0] + x = 4 if B>= 4 else B + T = pred.shape[1] + pred = pred.reshape(-1, 265) + smplx_model.batch_size = L = T * x + + times = pred.shape[0] // smplx_model.batch_size + joints = [] + for i in range(times): + joints.append(get_joint(smplx_model, betas, pred[i*L:(i+1)*L])) + joints = torch.cat(joints, dim=0) + joints = joints.reshape(B, T, -1, 3) + else: + smplx_model.batch_size = pred.shape[0] + joints = get_joint(smplx_model, betas, pred) + return joints \ No newline at end of file diff --git a/data_utils/hand_component.json b/data_utils/hand_component.json new file mode 100644 index 0000000000000000000000000000000000000000..2f75e6a25f484154d85eb33952902281cc4c357a --- /dev/null +++ b/data_utils/hand_component.json @@ -0,0 +1,4236 @@ +{ + "left":[ + [ + -0.0065002827905118465, + -0.043443795293569565, + 0.10452164709568024, + -0.042900316417217255, + 
-0.011481651104986668, + 0.3076476752758026, + 0.014990712516009808, + 0.06449854373931885, + 0.11080695688724518, + 0.18507879972457886, + -0.07475540786981583, + 0.37530016899108887, + -0.016014879569411278, + -0.029576366767287254, + 0.3004041910171509, + -0.014458670280873775, + 0.032006122171878815, + 0.23033395409584045, + 0.5604633688926697, + 0.10498113185167313, + 0.5397933721542358, + -0.048614535480737686, + 0.057744402438402176, + 0.21925750374794006, + 0.21002037823200226, + -0.02164149098098278, + 0.1638900339603424, + 0.2000914216041565, + 0.07326802611351013, + 0.5309985876083374, + 0.12327712029218674, + 0.06226075068116188, + 0.22608619928359985, + 0.05398350954055786, + -0.01169112604111433, + 0.23691436648368835, + -0.12490949779748917, + -0.09014706313610077, + -0.034181635826826096, + 0.1123010590672493, + 0.1116921454668045, + 0.008618329651653767, + -0.07716584950685501, + -0.026859739795327187, + -0.040021784603595734 + ], + [ + -0.061107732355594635, + 0.028760293498635292, + 0.0852101594209671, + -0.055592115968465805, + 0.024639930576086044, + -0.13902151584625244, + 0.09642048180103302, + -0.000601884035859257, + -0.28239816427230835, + 0.04764389991760254, + 0.022113285958766937, + 0.06034102290868759, + -0.04805706813931465, + -0.05859246477484703, + -0.22738094627857208, + -0.05480566620826721, + 0.014107032679021358, + -0.2478177398443222, + 0.2947772443294525, + -0.031076615676283836, + 0.14423705637454987, + -0.43971437215805054, + -0.12156734615564346, + -0.17276380956172943, + 0.0023738087620586157, + -0.12386418879032135, + -0.3269002437591553, + 0.078597791492939, + 0.009026134386658669, + 0.14061298966407776, + -0.10282959043979645, + -0.07330198585987091, + -0.36222153902053833, + -0.04677493870258331, + -0.004285688046365976, + -0.21868270635604858, + -0.28034910559654236, + -0.18166810274124146, + 0.14600247144699097, + 0.05381167680025101, + 0.055320415645837784, + -0.21162423491477966, + -0.033887237310409546, + 0.1577225774526596, + 0.10935050249099731 + ], + [ + -0.1064336821436882, + 0.09303449094295502, + -0.3014002740383148, + 0.14722639322280884, + 0.0010746014304459095, + -0.1641043722629547, + -0.06178005784749985, + -0.05960696563124657, + 0.16366051137447357, + -0.07701944559812546, + -0.026868093758821487, + -0.4157949686050415, + 0.03778314217925072, + 0.00812435895204544, + 0.19374501705169678, + -0.002125783823430538, + 0.009603243321180344, + 0.05753939226269722, + 0.25259846448898315, + -0.1577707976102829, + 0.07957044243812561, + -0.1732443869113922, + 0.05973230302333832, + 0.1290857344865799, + 0.12797924876213074, + -0.04824599251151085, + -0.04825138673186302, + 0.0992354080080986, + -0.13283026218414307, + -0.20355944335460663, + -0.06458800286054611, + -0.06507676094770432, + 0.20480415225028992, + -0.02290191687643528, + 0.08383756130933762, + 0.04287495091557503, + -0.13255465030670166, + -0.01144093181937933, + 0.0876108929514885, + 0.004888252820819616, + -0.0881739929318428, + -0.10281384736299515, + -0.0422121025621891, + -0.0601053312420845, + 0.15104496479034424 + ], + [ + -0.02684294432401657, + -0.009411846287548542, + 0.20996080338954926, + -0.08695130795240402, + 0.051755040884017944, + 0.2733485996723175, + 0.11699758470058441, + 0.04745694622397423, + -0.028511589393019676, + 0.009152330458164215, + 0.04008851200342178, + 0.006335575599223375, + 0.024406949058175087, + -0.0012957986909896135, + 0.11565860360860825, + -0.00551746366545558, + 0.02275954745709896, + 0.005480325315147638, + -0.0604497529566288, 
+ -0.0009865796891972423, + -0.17491832375526428, + 0.09513134509325027, + -0.0022878695745021105, + 0.04317467287182808, + -0.042537592351436615, + 0.01697908714413643, + 0.053470633924007416, + -0.01982058584690094, + 0.009610535576939583, + -0.13786230981349945, + 0.002854656893759966, + -0.008014212362468243, + 0.07683318108320236, + 0.01611691154539585, + 0.01680651679635048, + 0.025259772315621376, + -0.24394449591636658, + -0.09612732380628586, + 0.1807442307472229, + -0.022753261029720306, + 0.01281095203012228, + -0.2177630364894867, + -0.10621364414691925, + -0.0392993800342083, + 0.28347688913345337 + ], + [ + 0.016318397596478462, + -0.03648126870393753, + 0.05039751157164574, + -0.025695865973830223, + -0.00026572769274935126, + 0.21002613008022308, + 0.0035516363568603992, + 0.04446924105286598, + 0.14934365451335907, + -0.04689402133226395, + -0.013387766666710377, + -0.016596781089901924, + 0.015022682957351208, + 0.027603743597865105, + -0.011148272082209587, + 0.013118156231939793, + 0.0022309001069515944, + 0.07036174833774567, + 0.02704538218677044, + -0.07914850860834122, + 0.049424465745687485, + -0.15908393263816833, + 0.004625978413969278, + -0.11610068380832672, + 0.016215333715081215, + -0.014706678688526154, + -0.08644839376211166, + -0.04509324952960014, + -0.032722990959882736, + -0.08445950597524643, + -0.018176842480897903, + -0.008074218407273293, + -0.15422053635120392, + 0.014123921282589436, + -0.006887519732117653, + 0.028760502114892006, + 0.062480904161930084, + 0.10739129781723022, + -0.025675108656287193, + -0.02488124929368496, + 0.009264294058084488, + 0.09507975727319717, + -0.060702573508024216, + -0.16679802536964417, + 0.01834101229906082 + ], + [ + -0.006860456429421902, + 0.03430275619029999, + 0.10541100054979324, + -0.02925351820886135, + -0.008854901418089867, + -0.11257459968328476, + 0.027404753491282463, + 0.005477671977132559, + -0.08789193630218506, + -0.057081662118434906, + 0.023918181657791138, + -0.02883325144648552, + -0.0057859825901687145, + 0.027394162490963936, + -0.20066726207733154, + 0.017585940659046173, + -0.004243167582899332, + -0.01484750583767891, + 0.06808744370937347, + 0.011725028976798058, + 0.0977899506688118, + 0.08463256806135178, + 0.060994572937488556, + 0.06084435060620308, + 0.007171604782342911, + 0.027576690539717674, + 0.05295417457818985, + 0.017242006957530975, + -0.02075306512415409, + -0.031001586467027664, + -0.004191653337329626, + 0.009333438239991665, + 0.02929551713168621, + 0.011278285644948483, + 0.010517350398004055, + 0.0384749099612236, + -0.020004993304610252, + 0.07694586366415024, + 0.019894175231456757, + -0.025658152997493744, + 0.031197229400277138, + 0.06406533718109131, + -0.09038729965686798, + -0.159688338637352, + 0.07271291315555573 + ], + [ + -0.003293597837910056, + 0.020608481019735336, + 0.17736975848674774, + -0.06540100276470184, + 0.013043813407421112, + -0.006177311297506094, + 0.045163240283727646, + 0.012492994777858257, + -0.0917729064822197, + -0.05963604897260666, + 0.032804813235998154, + -0.018690932542085648, + 0.024753771722316742, + 0.01309316884726286, + 0.052925147116184235, + 0.009612965397536755, + 0.014142473228275776, + 0.030030885711312294, + 0.03968771547079086, + -0.056855760514736176, + -0.024758173152804375, + -0.06976953893899918, + 0.032744698226451874, + 0.07340559363365173, + 0.08997787535190582, + -0.033744510263204575, + -0.03684071823954582, + -0.0005191991222091019, + -0.02859567478299141, + -0.13013561069965363, + -0.012812117114663124, + 
-0.00714074308052659, + 0.056659843772649765, + 0.007996794767677784, + 0.007119663525372744, + 0.037621308118104935, + 0.049198396503925323, + -0.0010156190255656838, + -0.024883389472961426, + 0.028158826753497124, + 0.061080921441316605, + 0.034638140350580215, + 0.07525738328695297, + 0.10333201289176941, + -0.1325017213821411 + ], + [ + -0.022495703771710396, + -0.016010025516152382, + -0.024266116321086884, + -0.003703402355313301, + 0.017527369782328606, + 0.159004807472229, + 0.016453547403216362, + 0.012924930080771446, + 0.0709749087691307, + -0.04518720507621765, + 0.022061670199036598, + -0.12056085467338562, + -0.0045554740354418755, + -0.00913986749947071, + -0.17876332998275757, + 0.0009227339178323746, + -0.01741109788417816, + -0.006726282648742199, + 0.08306974172592163, + 0.09147880971431732, + 0.05467434227466583, + 0.06329692900180817, + -0.004105379339307547, + 0.0337633453309536, + -0.049023646861314774, + -0.007342121563851833, + 0.024658430367708206, + 0.02406417578458786, + 0.014786751009523869, + -0.03931219130754471, + -0.01033793855458498, + -0.006478187628090382, + -0.0034783596638590097, + -0.0103599913418293, + -0.01392274722456932, + -0.005495754536241293, + 0.035516753792762756, + -0.045847903937101364, + -0.014752183109521866, + 0.00818663090467453, + -0.029752077534794807, + -0.044768448919057846, + 0.071299247443676, + 0.11924222111701965, + -0.03440502658486366 + ], + [ + 0.01037701778113842, + 0.026998430490493774, + 0.06586451083421707, + -0.015211832709610462, + -0.033266182988882065, + -0.0795116201043129, + 0.023093054071068764, + 0.01445830799639225, + 0.011426141485571861, + -0.023018935695290565, + -0.0039683012291789055, + 0.0024486822076141834, + 0.032019659876823425, + 0.0014701877953484654, + 0.002281958470121026, + -0.012504305690526962, + -0.004029049072414637, + 0.10711884498596191, + 0.01753457635641098, + -0.013889643363654613, + 0.05143135413527489, + -0.03475813940167427, + -0.0328337736427784, + -0.13636285066604614, + -0.10292953997850418, + 0.042261753231287, + 0.030638914555311203, + -0.012393152341246605, + -0.0007874646107666194, + 0.002357605379074812, + 0.022026734426617622, + 0.012309701181948185, + 0.03602808713912964, + -0.005990192759782076, + 0.01528380811214447, + 0.08939861506223679, + 0.024183819070458412, + -0.0006722258403897285, + -0.006495789159089327, + -0.04842047020792961, + -0.06676465272903442, + -0.04371637850999832, + 0.07582321017980576, + 0.05618206411600113, + 0.0683959349989891 + ], + [ + 0.0027228104881942272, + 0.007558085024356842, + -0.01986050419509411, + -0.0036453227512538433, + -0.011269617825746536, + -0.039507731795310974, + 0.014368223026394844, + -0.001139448257163167, + -0.008862998336553574, + -0.0046951002441346645, + -0.01741824671626091, + 0.007818960584700108, + 0.01849675364792347, + -0.01158775482326746, + 0.04309699311852455, + -0.013079572468996048, + 0.0006091155228205025, + 0.03756615146994591, + 0.028269313275814056, + -0.011830338276922703, + 0.0026700987946242094, + 0.043102916330099106, + 0.03120562434196472, + 0.09325245022773743, + 0.04077508673071861, + 0.022158967331051826, + 0.09522164613008499, + -0.04337034747004509, + -0.004554356448352337, + -0.02630382776260376, + -0.026951588690280914, + -0.023328863084316254, + -0.22439415752887726, + 0.00400937395170331, + -0.012677961029112339, + 0.06592120975255966, + 0.010069012641906738, + -0.02721044234931469, + -0.009045140817761421, + -0.017200231552124023, + -0.017642805352807045, + -0.04072513058781624, + 
0.04390592873096466, + 0.010296802967786789, + 0.026480067521333694 + ], + [ + -0.0196752417832613, + 0.03059542551636696, + -0.004071312490850687, + 0.012630959041416645, + -0.012701338157057762, + -0.016266096383333206, + 0.011249222792685032, + 0.020901933312416077, + 0.048582132905721664, + 0.002174713183194399, + -0.037237558513879776, + -0.05071381852030754, + 0.009539234451949596, + 0.011311905458569527, + -0.010159273631870747, + 0.002807020675390959, + -0.0008882044930942357, + 0.022457411512732506, + -0.027037745341658592, + -0.024448513984680176, + 0.003671618876978755, + 0.02926153689622879, + -0.01011715643107891, + -0.04733146354556084, + -0.023861907422542572, + 0.04451662302017212, + 0.02474619448184967, + 0.006603296846151352, + -0.03811298683285713, + 0.026141753420233727, + 0.012986866757273674, + 0.015027341432869434, + -0.02867872081696987, + 0.012971485033631325, + -0.002738230861723423, + 0.016987551003694534, + -0.0756663978099823, + 0.03939340263605118, + 0.04692529886960983, + 0.06134684756398201, + 0.18201510608196259, + -0.019963862374424934, + 0.024278445169329643, + 0.0169772207736969, + -0.05542279779911041 + ], + [ + 0.008455779403448105, + -0.03601161018013954, + -0.03855350241065025, + 0.0034931846894323826, + 0.005980742163956165, + -0.016562286764383316, + -0.016562722623348236, + 0.012683814391493797, + 0.020969104021787643, + 0.015351788140833378, + -0.007111527491360903, + 0.04647205024957657, + -0.03062833473086357, + -0.008911609649658203, + -0.11017579585313797, + -0.006154295057058334, + 0.016679061576724052, + 0.0403323695063591, + -0.0021834878716617823, + -0.009101547300815582, + -0.08985194563865662, + -0.0793597400188446, + -0.052204471081495285, + -0.0013952577719464898, + 0.07903958112001419, + -0.030070308595895767, + 0.06708218157291412, + -0.009248117916285992, + 0.005016601178795099, + -0.01787707768380642, + 0.01874777115881443, + 0.007951648905873299, + 0.03321433439850807, + 0.02069927006959915, + -0.005988914053887129, + 0.07946880906820297, + -0.022785484790802002, + -0.03427620604634285, + -0.006891249213367701, + 0.009044891223311424, + 0.01240516360849142, + -0.020538274198770523, + -0.0030606375075876713, + -0.011045640334486961, + -0.004303240682929754 + ], + [ + 0.0036664451472461224, + 0.021467870101332664, + -0.00020952789054717869, + 0.0065444465726614, + -0.008865677751600742, + 0.04278237372636795, + 0.013052728958427906, + -0.03891315683722496, + 0.00607796898111701, + 0.014812156558036804, + 0.010518539696931839, + 0.01711435057222843, + 0.03571588173508644, + -0.036704450845718384, + -0.041875384747982025, + -0.0307945404201746, + -0.029675960540771484, + -0.054111041128635406, + -0.03833693265914917, + -0.08324743807315826, + -0.00279268273152411, + -0.043840985745191574, + 0.06718762218952179, + 0.0344025082886219, + 0.01800568960607052, + 0.03542892634868622, + 0.01444207038730383, + 0.019021261483430862, + -0.023896971717476845, + 0.05687994137406349, + 0.010798582807183266, + -0.0038682506419718266, + 0.025353148579597473, + -0.05920318886637688, + 0.03211173415184021, + -0.0004010381526313722, + 0.037328433245420456, + 0.012941398657858372, + 0.00783421192318201, + -0.04482981190085411, + 0.02423698641359806, + -0.03563130274415016, + 0.09118549525737762, + -0.02624489739537239, + 0.03152075782418251 + ], + [ + 0.0007554808398708701, + 0.05242738872766495, + -0.0014067788142710924, + 0.011390752159059048, + -0.021937567740678787, + 0.004478380084037781, + -0.016847755759954453, + 0.0027125380001962185, + 
0.05561840906739235, + -0.010386303998529911, + 0.014461257494986057, + 0.07208764553070068, + -0.012688713148236275, + 0.018304653465747833, + -0.04230412840843201, + 0.025743871927261353, + 6.573282007593662e-05, + 0.005349533632397652, + -0.018145034089684486, + -0.09586136043071747, + 0.028106361627578735, + 0.036183856427669525, + 0.03890823945403099, + 0.007154957391321659, + 0.02438383176922798, + -0.006218439899384975, + -0.012302412651479244, + -0.034468039870262146, + -0.028795529156923294, + -0.008399764075875282, + -0.004131385125219822, + 0.017562665045261383, + 0.00013936882896814495, + 0.03910807520151138, + -6.251157174119726e-05, + -0.025713898241519928, + -0.06022397056221962, + 0.01746310293674469, + 0.01646556332707405, + 0.018420696258544922, + -0.06146717816591263, + 0.026877349242568016, + -0.023622363805770874, + 0.08989278972148895, + 0.0073748305439949036 + ], + [ + 0.030542856082320213, + -0.008811701089143753, + -0.0523078478872776, + 0.0067365653812885284, + -0.027505842968821526, + -0.0047425576485693455, + 0.010683344677090645, + -0.014453236944973469, + 0.006321418564766645, + 0.029211604967713356, + 0.0232774056494236, + 0.07627473026514053, + 0.0030010107439011335, + -0.012009093537926674, + -0.00051021424587816, + -0.006279981695115566, + -0.02611403726041317, + -0.011486154980957508, + 0.03012763150036335, + 0.03562299162149429, + 0.019505254924297333, + -0.00682585034519434, + 0.03566821664571762, + 0.003574620932340622, + -0.04172534495592117, + 0.023055287078022957, + -0.028453411534428596, + -0.02836139313876629, + 0.03357318416237831, + -0.09116940945386887, + -0.015319928526878357, + -0.02438412979245186, + 0.016869012266397476, + 0.005463107023388147, + 0.037935856729745865, + 0.020401470363140106, + -0.004871362820267677, + 0.00804493110626936, + 0.019882427528500557, + 0.0015647263498976827, + 0.04529079794883728, + -0.004697592929005623, + 0.0108138807117939, + -0.0012828774051740766, + -0.006596735212951899 + ], + [ + 0.005858828779309988, + 0.016593964770436287, + 0.030293909832835197, + -0.0017675489652901888, + -0.003695212071761489, + -0.04358220472931862, + -0.009942498058080673, + 0.022219518199563026, + 0.07844208180904388, + -0.010169927962124348, + 0.016561392694711685, + 0.024813547730445862, + 0.0012255803449079394, + 0.013036666437983513, + -0.000728949555195868, + 0.006982212420552969, + 0.007227342575788498, + 0.01262681558728218, + 0.0005772245931439102, + 0.0446879118680954, + -0.009557729586958885, + 0.017626583576202393, + -0.005579731427133083, + 0.0305457916110754, + 0.01703699305653572, + -0.07850301265716553, + -0.04634169489145279, + -0.006922584027051926, + 0.0015523477923125029, + 0.009070397354662418, + -0.0015999304596334696, + 0.005408989265561104, + 0.0021382628474384546, + -0.002667429158464074, + -0.028962355107069016, + -0.029028646647930145, + 0.011584458872675896, + 0.01618047058582306, + 0.007066205609589815, + -0.027669474482536316, + 0.030012264847755432, + -0.03296054154634476, + 0.05482836067676544, + -0.019482742995023727, + 0.033676858991384506 + ], + [ + -0.0021477786358445883, + 0.01811544969677925, + -0.00822137575596571, + 0.0008912881603464484, + -0.0048518371768295765, + -0.00026707572396844625, + 0.01708621345460415, + -0.0006847226759418845, + -0.00736237084493041, + 0.0144194345921278, + 0.024503272026777267, + -0.03498969227075577, + 0.001738202292472124, + -0.00010192584159085527, + -0.014449493028223515, + 0.001262195990420878, + 0.0012137452140450478, + 0.05094340071082115, + 
-0.06767641752958298, + 0.05372973531484604, + 0.012530145235359669, + -0.020315852016210556, + 0.050124071538448334, + 0.011806163936853409, + 0.046647876501083374, + 0.023046184331178665, + -0.06621181219816208, + 0.003629519371315837, + 0.036956630647182465, + 0.026524772867560387, + 0.016542410477995872, + 0.006295610219240189, + -0.013811145909130573, + -0.00569135183468461, + 0.015433442778885365, + 0.045816875994205475, + -0.011365693993866444, + -4.4254604290472344e-05, + 0.018294615671038628, + -0.0010185466380789876, + -0.008983682841062546, + 0.004091570619493723, + -0.023012524470686913, + 0.022986240684986115, + 0.006337582133710384 + ], + [ + -0.006680434104055166, + -0.0781208947300911, + 0.03560307249426842, + 0.01747014746069908, + -0.0038246747571974993, + -0.01575591415166855, + -0.016310250386595726, + -0.017044968903064728, + 0.020810458809137344, + -0.017268521711230278, + -0.050511594861745834, + 0.008982710540294647, + 0.021877462044358253, + 0.006228135898709297, + -0.0035072253085672855, + 0.0060678282752633095, + -0.037531591951847076, + 0.0007317958516068757, + -0.0026024957187473774, + 0.03375082463026047, + -0.016674943268299103, + -0.025238024070858955, + 0.012202970683574677, + 0.02889031358063221, + -0.011467299424111843, + 0.018403641879558563, + 0.0008077493403106928, + 0.008094809018075466, + -0.01859569363296032, + 0.004368297290056944, + -0.002272853162139654, + -0.007889257743954659, + -0.003111835103482008, + -0.02613692171871662, + 0.00930595863610506, + -0.005887841805815697, + -0.037920571863651276, + 0.033110376447439194, + 0.014921287074685097, + 0.022004052996635437, + -0.025661425665020943, + 0.025482140481472015, + 0.011318915523588657, + 0.02024848572909832, + 0.00804445892572403 + ], + [ + -0.0023323947098106146, + 0.03986965864896774, + 0.023009436205029488, + -0.029371894896030426, + -0.026156244799494743, + -0.025455931201577187, + 0.03612012043595314, + 0.02219763956964016, + 0.06387332826852798, + 0.021117234602570534, + 0.01819853112101555, + -0.005769097246229649, + -0.0005661676987074316, + -0.02720893733203411, + 0.0016807635547593236, + -0.013957783579826355, + -0.008374016731977463, + -0.015384451486170292, + 0.01985081098973751, + 0.010488774627447128, + -0.04797288402915001, + -0.01961948722600937, + 0.013332879170775414, + 0.012228702194988728, + -0.026425980031490326, + 0.01883620396256447, + 0.018522778525948524, + 0.029070226475596428, + 0.0035870876163244247, + 0.01141587644815445, + -0.005484623368829489, + -0.009538937360048294, + -0.007844988256692886, + -0.014600615948438644, + 0.015341932885348797, + -0.008447336032986641, + 0.006709011737257242, + -0.022441094741225243, + -0.002629294293001294, + 0.006978550925850868, + -0.015776541084051132, + 0.026218807324767113, + -0.03923872485756874, + -0.0001955090556293726, + -0.023533381521701813 + ], + [ + -0.007703295908868313, + -0.008345683105289936, + 0.010234742425382137, + 0.016737811267375946, + 0.011508629657328129, + -0.008886181749403477, + -0.015952015295624733, + -0.013445794582366943, + 0.004053363110870123, + -0.022865070030093193, + 0.0037585790269076824, + 0.014152067713439465, + 0.010187502950429916, + 0.018974917009472847, + -0.013702861033380032, + 0.00576101103797555, + -0.00015525757044088095, + 0.05282927304506302, + -0.003934428095817566, + -0.018432650715112686, + -0.0037050058599561453, + -0.005815625190734863, + 0.03881102427840233, + 0.008209442719817162, + -0.02596147544682026, + 0.017851268872618675, + -0.010695546865463257, + 
-0.0007560423691757023, + 0.01206777524203062, + -0.0008125992026180029, + -0.006552835926413536, + 0.013022815808653831, + 0.0004488170670811087, + -0.008348164148628712, + 0.00013128247519489378, + -0.03856707736849785, + 0.009184446185827255, + -0.05820004269480705, + -0.037482671439647675, + 0.04758414253592491, + 0.006201918702572584, + -0.04102705419063568, + -0.012194694951176643, + -0.03271199390292168, + -0.010727118700742722 + ], + [ + 0.008452201262116432, + 0.036472272127866745, + -0.020320884883403778, + -0.003595655784010887, + -0.0016662892885506153, + 0.022026635706424713, + -0.017016971483826637, + -0.030711114406585693, + -0.031180579215288162, + -0.0034158097114413977, + 0.016970504075288773, + 0.007747208699584007, + 0.012479030527174473, + 0.02783718705177307, + -0.003990172874182463, + 0.019525159150362015, + -0.00537873525172472, + 0.03924533724784851, + 0.03242785483598709, + 0.0014453479088842869, + -0.05218246579170227, + 0.006417648401111364, + 0.003966373857110739, + -0.0003704492119140923, + -0.0263835396617651, + -0.022614246234297752, + -0.006226354744285345, + 0.03456621617078781, + 0.0012203565565869212, + 0.015491430647671223, + -0.004601797088980675, + 0.003556341864168644, + -0.013310059905052185, + -0.015447961166501045, + 0.0028846205677837133, + 0.006352880969643593, + -0.02240452729165554, + 0.017737651243805885, + 0.009904802776873112, + -0.005678647663444281, + -0.0023511068429797888, + 0.03184409812092781, + 0.02212630957365036, + -0.005479009822010994, + -0.0005827452405355871 + ], + [ + 0.013699429109692574, + 0.0033424491994082928, + -0.02516511268913746, + -0.024059422314167023, + -0.011336086317896843, + -0.001995411003008485, + 0.032384857535362244, + 0.025773167610168457, + -0.006303270813077688, + -0.009568688459694386, + -0.028605926781892776, + -0.008808879181742668, + 0.008818162605166435, + 0.02372622862458229, + 0.0014211301458999515, + 0.009698646143078804, + -0.01846197247505188, + -0.01695738360285759, + 0.01138181984424591, + -0.012916128151118755, + -0.005945347249507904, + -0.01252772193402052, + 0.0033041308633983135, + 0.021015100181102753, + -0.012405123561620712, + 0.009321194142103195, + -0.014053457416594028, + -0.02565625123679638, + 0.025283608585596085, + -0.0008273700950667262, + 0.03841831535100937, + 0.04461786523461342, + 0.006019273307174444, + -0.00516040762886405, + -0.040852297097444534, + 0.0024584317579865456, + -0.010626666247844696, + -0.007327461149543524, + -0.0016056908061727881, + 0.003469041781499982, + -0.007411503698676825, + 0.0075955018401145935, + 0.01313801109790802, + -0.008599716238677502, + 0.0018994261045008898 + ], + [ + 0.0005498353275470436, + -0.0319436676800251, + -0.018350139260292053, + -0.017125563696026802, + -0.0034153282176703215, + -0.004779968410730362, + 0.045640673488378525, + 0.0047793020494282246, + -0.006410537287592888, + 0.015605989843606949, + -0.012619836255908012, + 0.0014300135662779212, + -0.006947851274162531, + -0.008033467456698418, + -0.011409820057451725, + -0.003566782223060727, + 0.004699454642832279, + 0.05236497148871422, + 0.0046944874338805676, + -0.025257842615246773, + -0.002994734328240156, + 0.015204668045043945, + -0.021089112386107445, + 0.02023189887404442, + -0.0010332213714718819, + 0.00785747915506363, + -0.017137425020337105, + 0.004313879180699587, + -0.010306312702596188, + 0.00681322580203414, + -0.0034331935457885265, + -0.017821529880166054, + 0.004885640926659107, + 0.002734225010499358, + 0.009505352936685085, + -0.035248950123786926, + 
0.015221849083900452, + 0.009780111722648144, + 0.017998438328504562, + -0.024638639762997627, + 0.004241432528942823, + 0.0002386040287092328, + -0.006098208483308554, + 0.005580107681453228, + -0.0033594027627259493 + ], + [ + 0.011463717557489872, + 0.018766270950436592, + 8.590449579060078e-05, + 0.008514202199876308, + -0.0025223407428711653, + 0.004520431160926819, + -0.004915399011224508, + -0.0035596643574535847, + -0.004628830123692751, + -0.005701262969523668, + 0.015460366383194923, + -0.002019961131736636, + 0.002108558313921094, + 0.004662561230361462, + 0.005207268986850977, + 0.00492060324177146, + -0.0009433203376829624, + 0.01866292394697666, + 0.003009023144841194, + 0.02914227545261383, + 0.007199206855148077, + -0.030993036925792694, + 0.0009944384219124913, + -0.007819466292858124, + 0.02571331523358822, + 0.027489913627505302, + 0.0458693727850914, + -0.019027728587388992, + 0.008148781023919582, + -0.008761102333664894, + 0.001935663167387247, + 0.011475535109639168, + 0.004167452920228243, + -0.007355986628681421, + -0.012524465098977089, + -0.059944238513708115, + -0.0058122919872403145, + 0.008124226704239845, + 0.010307827964425087, + -0.023800605908036232, + 0.009315493516623974, + 0.00832737609744072, + -4.363518382888287e-05, + 0.00952251348644495, + 0.00479150889441371 + ], + [ + 0.007256949786096811, + 0.03601238131523132, + 0.008004694245755672, + 0.00789707712829113, + -0.02580997906625271, + 0.01090193074196577, + 0.0025381019804626703, + -0.03368000686168671, + 0.0003010774089489132, + -0.0074155586771667, + -0.023931991308927536, + 0.0072115082293748856, + 0.008573928847908974, + -0.015263441950082779, + -0.005148470867425203, + -0.015294834040105343, + -0.0012072762474417686, + 0.005141685716807842, + -0.0033261156640946865, + 0.01706104539334774, + 0.004741530865430832, + 0.007806902285665274, + -0.04825945943593979, + 0.01429178286343813, + 0.011948144994676113, + 0.018770279362797737, + -0.02372652105987072, + -0.01763378269970417, + -0.028210734948515892, + -0.006298302207142115, + 0.0014280491741374135, + 0.003089534817263484, + 0.0033092449884861708, + -0.0066254884004592896, + 0.002827590797096491, + 0.0018842555582523346, + -0.001385998446494341, + -0.021954011172056198, + -0.005023208912461996, + 0.013155384920537472, + -0.004307809751480818, + 0.009782231412827969, + 0.003535080701112747, + -0.013928093016147614, + 0.0024879719130694866 + ], + [ + 0.003366141114383936, + 0.001687322393991053, + -0.01570109836757183, + -0.015234448947012424, + 0.0031824191100895405, + -0.00045815485646016896, + 0.037793535739183426, + -0.005354998167604208, + 0.0071918657049536705, + -0.023681409657001495, + -0.010527492500841618, + 0.011286735534667969, + 0.012779594399034977, + 0.05186581239104271, + 0.002791260601952672, + 0.016553761437535286, + 0.005137853790074587, + -0.018263358622789383, + -0.006586691364645958, + 0.018161306157708168, + -0.0006730396416969597, + -0.0024377235677093267, + 0.0020296117290854454, + -0.01403796300292015, + 0.0181891992688179, + 0.002181856893002987, + 0.009124440141022205, + 0.008268313482403755, + -0.014520031400024891, + 0.011442477814853191, + -0.020665081217885017, + -0.004358141217380762, + 0.002436751965433359, + 0.019096149131655693, + 0.036353468894958496, + -0.0007185546564869583, + 0.009594779461622238, + -0.010843418538570404, + -0.004595187492668629, + 0.0050412374548614025, + -0.001194936572574079, + -0.003474273718893528, + 0.005747854709625244, + -0.0004006402159575373, + 0.005848734173923731 + ], + [ + 
0.005553645547479391, + -0.014829201623797417, + 0.0030189836397767067, + 0.0019321298459544778, + 0.017568906769156456, + -0.00797270517796278, + 0.005635120905935764, + -0.01206644345074892, + 0.015796935185790062, + -0.018575400114059448, + 0.04890725389122963, + -0.0016979356296360493, + 0.003093397943302989, + -3.803123036050238e-05, + 0.005210451781749725, + 0.01655028946697712, + -0.014960705302655697, + -0.0072961971163749695, + 0.006164019927382469, + -0.016866367310285568, + 9.394375956617296e-05, + 0.00788196176290512, + -0.03728523850440979, + 0.004866194445639849, + 0.011433728970587254, + 0.030994391068816185, + -0.008317786268889904, + -0.0005908770835958421, + 0.028398267924785614, + 0.010313336737453938, + -0.004636738914996386, + -0.006355722900480032, + -0.0020106921438127756, + -0.0023178497795015574, + -0.002649511443451047, + 0.007435911800712347, + -0.007661172188818455, + 0.0035062299575656652, + 0.0012476628180593252, + 0.009976162575185299, + -0.0006027839845046401, + 0.00467014592140913, + 0.009400233626365662, + -0.005134325008839369, + 0.0012779058888554573 + ], + [ + 0.016268962994217873, + -0.0030379893723875284, + 0.0016002239426597953, + -0.0017761185299605131, + 0.011487155221402645, + -0.0018621287308633327, + -0.007008627522736788, + 0.025422925129532814, + 0.0014716434525325894, + 0.013461830094456673, + -0.0006149964756332338, + 0.0025843719486147165, + -0.005991258658468723, + 0.00429184827953577, + -0.002218527952209115, + -0.01678687147796154, + 0.03942489251494408, + -0.0030056287068873644, + 0.010203512385487556, + 0.004870238713920116, + -0.003717053448781371, + 0.01388748362660408, + 0.004829874727874994, + -0.009799350053071976, + 0.015065329149365425, + 0.028329303488135338, + -0.011301170103251934, + 0.01473788172006607, + -0.005888659041374922, + -0.01177951693534851, + 0.008465822786092758, + 0.009206120856106281, + -0.003913552034646273, + -0.00923006422817707, + 0.014665525406599045, + -0.008997058495879173, + -0.0203713309019804, + 0.007739989552646875, + 0.0003181488427799195, + 0.006230671890079975, + -0.024753211066126823, + -0.005024408455938101, + 0.030169211328029633, + -0.018536392599344254, + -0.01616176776587963 + ], + [ + -0.01910393126308918, + 0.00871589407324791, + -0.0007106777629815042, + 0.011063016019761562, + 0.014131869189441204, + 0.0008084847358986735, + -0.015428530983626842, + 0.013651038520038128, + -0.003771252231672406, + -0.0029813917353749275, + -0.0006631190772168338, + 0.01307733729481697, + 0.0011754566803574562, + 0.003673878964036703, + -0.0005424732225947082, + 0.012685926631093025, + -0.01106881070882082, + -0.004886738024652004, + 0.018098043277859688, + 0.009469973854720592, + -0.01951996050775051, + -0.0028987592086195946, + -0.00014805985847488046, + 0.005811454262584448, + -0.0007206271402537823, + 0.03397194668650627, + -0.017788013443350792, + 0.0011831517331302166, + -0.024638516828417778, + 0.005374486092478037, + 0.00321221468038857, + 0.011132437735795975, + -0.000902124447748065, + 0.0193793922662735, + -0.008925841189920902, + 0.008256366476416588, + 0.024065475910902023, + 0.011761298403143883, + 0.009709605947136879, + -0.017639119178056717, + -0.0005737878382205963, + -0.02368728071451187, + -0.014572097919881344, + 0.005914192646741867, + -0.0058142333291471004 + ], + [ + 0.006649153307080269, + 0.011156578548252583, + -0.008667523972690105, + -0.0024249095004051924, + 0.01574135012924671, + 0.0011882235994562507, + 0.0017571536591276526, + 0.010584973730146885, + -0.004354995675384998, 
+ -0.0049141691997647285, + 0.0049272626638412476, + 0.005947389639914036, + 0.00042120664147660136, + 0.0038913593161851168, + -0.0007645332952961326, + -0.010488866828382015, + 0.022886788472533226, + 0.001011396641843021, + -0.0007216680096462369, + 0.0016741305589675903, + -0.0006031721131876111, + -0.010788626037538052, + -0.011402606032788754, + 0.01740707829594612, + -0.011967460624873638, + -0.0019294061930850148, + 0.004990001674741507, + -0.00919059943407774, + -0.0014225010527297854, + 0.0027023048605769873, + 0.00430525466799736, + 0.0008500043186359107, + 0.00021649774862453341, + -0.02432853728532791, + 0.008758123964071274, + 0.00063912762561813, + 0.021570000797510147, + 0.031498074531555176, + -0.0023175596725195646, + 0.03836365416646004, + 0.004075958859175444, + -0.0053198630921542645, + -0.012724303640425205, + 0.016394853591918945, + 0.023500945419073105 + ], + [ + 0.025469960644841194, + 0.002241270150989294, + 0.0004540280206128955, + 0.0009244878892786801, + 0.00957506150007248, + 0.0016278807306662202, + -0.013423891738057137, + 0.017305035144090652, + -0.0015235238242894411, + -0.01020258292555809, + 0.007669116836041212, + -0.0051340158097445965, + -0.0010782890021800995, + 0.0064316983334720135, + 0.0004841153568122536, + -0.0019476833986118436, + -0.004271430429071188, + 0.0017103132558986545, + -0.013322128914296627, + -0.0011082219425588846, + 0.011887851171195507, + -0.01331858430057764, + -0.013245712965726852, + 0.024600394070148468, + -0.02362148091197014, + -0.009075438603758812, + 0.0033172816038131714, + -0.013799937441945076, + -0.009887240827083588, + 0.010585967451334, + -0.01044811587780714, + 0.009472894482314587, + -0.0005284295184537768, + -0.001361248199827969, + 0.02594010718166828, + 0.005964653566479683, + -0.014829547144472599, + -0.007003128994256258, + 0.007044685073196888, + -0.022479651495814323, + -0.0024220854975283146, + -0.006172834429889917, + -0.006807849742472172, + -0.0051607307977974415, + -0.02364627830684185 + ], + [ + -0.01351979747414589, + 0.01071852445602417, + -0.012320837005972862, + -0.016693919897079468, + 0.006019806023687124, + 0.0020079852547496557, + -0.0005733276484534144, + 0.013488642871379852, + -0.0022639636881649494, + -0.015679996460676193, + -0.0021946877241134644, + 0.014166414737701416, + -0.011975525878369808, + 0.0002792203158605844, + 0.0011187032796442509, + 0.004500983282923698, + 0.0045900000259280205, + 0.0030179214663803577, + -0.01949739269912243, + 0.005226042587310076, + 0.01172392163425684, + -0.010197860188782215, + 0.00011359209747752175, + 0.00680533517152071, + -0.008970695547759533, + 0.011310221627354622, + 0.0019072643481194973, + 0.012785663828253746, + -0.009152069687843323, + -0.003070997539907694, + -0.016211656853556633, + -0.034127477556467056, + 0.007422042544931173, + -0.010613166727125645, + -0.03430658206343651, + 0.005655908957123756, + -0.01145249791443348, + -0.0023543487768620253, + -0.0009709355654194951, + -0.0010431264527142048, + -0.004303919151425362, + 0.003024874720722437, + 0.008601179346442223, + -0.006732244975864887, + -0.00464903749525547 + ], + [ + -0.0018222297076135874, + -0.008018524385988712, + 0.0018628957914188504, + 0.0042664408683776855, + 0.006818541791290045, + -0.0037505687214434147, + -0.002596283331513405, + -0.021230431273579597, + 0.012844810262322426, + -0.009535901248455048, + -0.007141975220292807, + 0.0073865195736289024, + 0.005147598218172789, + 0.018142197281122208, + -0.0008456947398371994, + -0.014044527895748615, + 
0.03152196854352951, + -0.00707216328009963, + 0.0026926815044134855, + -0.006269850768148899, + 0.0017533364007249475, + 0.0038201462011784315, + 0.00037060832255519927, + 0.00010697160905692726, + -0.0029500171076506376, + 0.00594440707936883, + -0.0039031344931572676, + 0.00724786426872015, + 0.009470655582845211, + -0.0054483539424836636, + -0.00194179720710963, + 0.010519498027861118, + -0.001662794267758727, + -0.02456548810005188, + -0.012444569729268551, + 0.010108706541359425, + 0.004587135743349791, + -0.013706480152904987, + 0.01263850275427103, + -0.026247264817357063, + 0.011768225580453873, + 0.007999289780855179, + -0.017324434593319893, + 0.012651980854570866, + -0.003926132805645466 + ], + [ + -0.020188461989164352, + -0.006698640063405037, + -0.004112639930099249, + 0.0007280276040546596, + -0.002634846605360508, + -0.0005515202647075057, + 0.02123960293829441, + -0.019464543089270592, + 0.0001587762963026762, + -0.008599312044680119, + 0.018271788954734802, + 0.005116484593600035, + -0.011170112527906895, + -0.008449687622487545, + 0.004154965281486511, + -0.004302148707211018, + 0.0056588854640722275, + 0.0011641217861324549, + -0.0007424535579048097, + 0.0075840638019144535, + 0.0027921327855437994, + -0.002816914115101099, + 0.006281680427491665, + -0.005265111103653908, + -0.004990534391254187, + -0.012470233254134655, + 0.007389815989881754, + -0.013034018687903881, + -0.014276803471148014, + 0.0028325908351689577, + 0.00015733606414869428, + 0.02050873078405857, + -0.000681033416185528, + -0.004920493811368942, + -0.006407404784113169, + 0.0035581106785684824, + -5.272514317766763e-05, + 0.00918152742087841, + 0.02351958677172661, + 0.01028912141919136, + -0.01544911041855812, + -0.009533251635730267, + 0.003677165601402521, + -0.012997119687497616, + -0.021199379116296768 + ], + [ + -0.004125118721276522, + 0.002716755261644721, + -0.002079974627122283, + -0.012317268177866936, + -0.01793847046792507, + 0.001064602518454194, + -0.011365224607288837, + 0.009342136792838573, + -0.002003356348723173, + -0.012669265270233154, + -0.008233653381466866, + 0.004353824071586132, + -0.0006793754873797297, + -0.00042605173075571656, + 0.0007317265844903886, + 0.018481364473700523, + -0.011487459763884544, + 1.1120008821308147e-05, + 0.00423803785815835, + -0.0026452094316482544, + 0.00037203254760243, + 0.01218781154602766, + -0.0036872359924018383, + -0.012102054432034492, + 0.016199873760342598, + -0.005179674830287695, + 0.0008002324029803276, + -0.0035961114335805178, + 0.008391482755541801, + -0.0016886505763977766, + 0.010650482028722763, + -0.01012666430324316, + -0.0008882181136868894, + -0.03662685677409172, + 0.013940184377133846, + -0.00019492879800964147, + 0.0035409636329859495, + 0.0030869427137076855, + 0.007558043114840984, + -0.0018918249988928437, + -0.0034307725727558136, + -0.018756462261080742, + -0.01163308322429657, + -0.0016895552398636937, + -0.013370579108595848 + ], + [ + -0.005524301901459694, + -0.000977616640739143, + 0.0042923144064843655, + -0.0044611855410039425, + -0.03307471424341202, + 0.004687427543103695, + -0.011328428983688354, + -0.006677189841866493, + -0.001559663680382073, + -0.01713346131145954, + -0.0063633667305111885, + -0.005269331391900778, + -0.01711510866880417, + 0.00409148633480072, + 0.0010997614590451121, + -0.009250601753592491, + 0.017806652933359146, + -0.003262654412537813, + 0.002498725662007928, + -0.003155801212415099, + -0.00750375771895051, + -0.006839435547590256, + 3.0091141525190324e-05, + 
0.00972696766257286, + -0.005752003286033869, + 0.008066670969128609, + -0.0008695494034327567, + -0.0028020453173667192, + 0.024329842999577522, + 0.010055238381028175, + -0.00648196367546916, + -0.00644444627687335, + 0.0021590455435216427, + 0.02172566018998623, + 0.005145265720784664, + -0.0022447656374424696, + 0.0009226002148352563, + 0.013772794045507908, + 0.00182228849735111, + -0.0007712747901678085, + -0.0011832761811092496, + -0.010212586261332035, + 0.003960642497986555, + -0.001467913738451898, + -0.0018004805315285921 + ], + [ + 0.03515428304672241, + -0.011563684791326523, + 5.127954136696644e-05, + -0.008626585826277733, + -0.02314571663737297, + 6.98980875313282e-05, + -0.00612989068031311, + -0.004387413617223501, + -0.001388968201354146, + 0.006792738102376461, + 0.012103176675736904, + -0.005686194635927677, + -0.005020753014832735, + 0.008832537569105625, + 0.001394168590195477, + -0.0007854663417674601, + -0.006541788578033447, + -0.0008951985510066152, + -0.003009835258126259, + -0.0010391295654699206, + 0.002785543678328395, + 0.0005243027117103338, + 0.00155424396507442, + -0.0019627693109214306, + 0.0042829508893191814, + 0.0028527246322482824, + 0.0003181783249601722, + 0.018253978341817856, + -0.018093591555953026, + -0.00162507442291826, + -0.011009054258465767, + 0.008197661489248276, + -0.0004319990111980587, + -0.0015664614038541913, + -0.020246168598532677, + 0.002907453803345561, + 0.00868506170809269, + 0.0005308574764057994, + 0.006167100742459297, + 0.009623652324080467, + -0.00046505016507580876, + -0.012614063918590546, + -0.007564898580312729, + 0.0036153290420770645, + 0.0028734186198562384 + ], + [ + -0.006848630495369434, + -0.014676680788397789, + -0.009769118390977383, + -0.012179943732917309, + -0.012238305993378162, + 0.00032428783015348017, + -0.0036104496102780104, + 0.005420226138085127, + -0.002557483036071062, + -0.017464030534029007, + 0.016143344342708588, + 0.0009277384378947318, + -0.009015277028083801, + -0.013943755067884922, + 0.00460736732929945, + 0.002973730443045497, + 0.0087607866153121, + -0.0005613718531094491, + 0.0036566469352692366, + 0.0045868949964642525, + -0.0047304825857281685, + 0.0024165157228708267, + 0.006300253327935934, + -0.0016411547549068928, + -0.001050203456543386, + 0.00394306518137455, + -0.005538031924515963, + -0.004923299886286259, + -0.020530205219984055, + 0.0038366327062249184, + 0.007140693254768848, + 0.010964092798531055, + -0.0011853117030113935, + -0.005705537740141153, + 0.008157758973538876, + 0.0002241577603854239, + -0.009419707581400871, + -0.0068698907271027565, + -0.023548549041152, + -0.008388974703848362, + 0.0130734508857131, + 0.010252144187688828, + 1.2216474942761124e-06, + 0.00706624798476696, + 0.013854717835783958 + ], + [ + 0.01693136803805828, + -0.0031372110825031996, + -0.0032617931719869375, + -0.0029004744719713926, + 0.0023515133652836084, + -0.0012261347146704793, + -0.0008195796981453896, + -0.0045450902543962, + 0.0002698961179703474, + -0.012286235578358173, + 0.004514013882726431, + -0.002021311316639185, + 0.02715420164167881, + -0.009062565863132477, + -0.002124266466125846, + -0.0019582815002650023, + 0.012648486532270908, + -0.00198007351718843, + 0.005776786711066961, + 0.002679277677088976, + -0.004615813959389925, + 0.0010468489490449429, + 0.009104210883378983, + -0.004296170547604561, + -0.0009241654188372195, + -0.0032640951685607433, + -0.0007544599939137697, + -0.011452611535787582, + -0.006633649580180645, + 0.009103978984057903, + 0.020047502592206, + 
-0.024611419066786766, + 0.002058004029095173, + 0.011610561981797218, + -0.008307352662086487, + -0.0008427408756688237, + -0.0020841893274337053, + -0.0008554468513466418, + 0.005969997029751539, + 0.0012433535885065794, + -0.002217241795733571, + -0.003036477603018284, + -0.006090730894356966, + -0.000806248455774039, + -0.0069640884175896645 + ], + [ + -0.011200237087905407, + -0.002682264195755124, + -0.005129658617079258, + 0.01287020742893219, + -0.015529340133070946, + 0.002236587693914771, + 0.0033685883972793818, + 0.01608942449092865, + -0.005459952168166637, + -0.007895588874816895, + 0.007723725866526365, + 0.007836567237973213, + 0.028264129534363747, + -0.0039421068504452705, + -0.0007943419041112065, + -0.008082963526248932, + 0.0012483496684581041, + 0.00024735109764151275, + -0.007795915473252535, + -0.0006657515186816454, + 0.004122520796954632, + -0.00032066713902167976, + -0.009191579185426235, + 0.002754528308287263, + 0.0012004548916593194, + -0.005048173945397139, + 0.0024772307369858027, + 0.019257336854934692, + 0.004682609811425209, + -0.0063582295551896095, + -0.0013501865323632956, + 0.009260028600692749, + -0.002344950335100293, + 0.0036942947190254927, + -0.0008335068705491722, + -0.0024531492963433266, + -0.0011307316599413753, + 0.0028964411467313766, + -0.0007962941308505833, + -0.0002912830968853086, + -0.0014758592005819082, + -0.0003035767294932157, + -0.0029652034863829613, + -0.0007014012662693858, + -0.002067896071821451 + ], + [ + 0.006195953115820885, + 0.004878598265349865, + -0.0003935272397939116, + -0.004443035461008549, + 0.007727185729891062, + -0.000876554346177727, + 0.009658520109951496, + -0.01595279574394226, + 0.003571760607883334, + -0.001960332738235593, + -0.005607483442872763, + 0.0023387304972857237, + -0.00044839971815235913, + -0.006612768862396479, + 0.00036678201286122203, + 0.0020738537423312664, + -0.001467038644477725, + -0.0005324701778590679, + -0.0030059574637562037, + 0.0018900817958638072, + 0.0018344158306717873, + -0.0016157120699062943, + -0.001982525922358036, + 0.0015301307430490851, + -0.0006320856045931578, + -0.0015300542581826448, + 0.0004552277678158134, + 0.011698273941874504, + 0.00291864899918437, + -0.005847232416272163, + 0.005462152883410454, + 0.00436872523277998, + -0.0020734984427690506, + 0.0019671269692480564, + -0.0017167485784739256, + -0.0006844321615062654, + -0.011944874189794064, + 0.018293635919690132, + -0.024431472644209862, + -0.008728659711778164, + -0.001507685985416174, + -0.0172902699559927, + -0.008215625770390034, + 0.002193963620811701, + -0.006942014209926128 + ], + [ + -0.006958430632948875, + -0.0022303597070276737, + 0.005184312351047993, + 0.012648834846913815, + -0.011840980499982834, + 0.001093623461201787, + 0.004257561638951302, + -0.0024148873053491116, + -6.128253517090343e-06, + 0.015434450469911098, + 0.009783624671399593, + -0.001900278264656663, + -0.004381220322102308, + 0.01729026436805725, + -0.0005190964438952506, + 0.014209604822099209, + 0.00821677315980196, + -0.0034260055981576443, + -0.0032596851233392954, + -0.00029968336457386613, + 0.0037785160820931196, + -0.0056799608282744884, + -0.00553898373618722, + 0.007639668881893158, + -0.00702810101211071, + 0.00034543455694802105, + 0.0021905112080276012, + 0.00044441065983846784, + -0.007309116888791323, + -0.0037157414481043816, + 0.020854750648140907, + -0.011359392665326595, + -0.003255574032664299, + -0.0026862621307373047, + 0.0005511222407221794, + 0.00021524043404497206, + -0.0009734127670526505, + 
-0.0015755231725052, + -0.006016546860337257, + -0.0024158258456736803, + -0.0008244782220572233, + -0.0018912493251264095, + 0.0010421037441119552, + -0.0010570120066404343, + -0.0015601996565237641 + ], + [ + 0.003647628240287304, + -0.00018447083130013198, + 0.002302320674061775, + 0.019668636843562126, + -0.005486125126481056, + 0.0024349757004529238, + 0.012231294997036457, + 0.00772100267931819, + -0.003329834435135126, + 0.0013343141181394458, + 0.001208956353366375, + -0.0020790330599993467, + 6.166790990391746e-05, + 0.004452846944332123, + -0.0005777273909188807, + -0.005199058447033167, + -0.0003697117790579796, + -0.00018579346942715347, + 0.0067620184272527695, + -3.727666990016587e-05, + -0.00733867147937417, + 0.003955514635890722, + 0.004073562566190958, + -0.004260346293449402, + 0.0031335647217929363, + -0.00019730959320440888, + -0.002632821211591363, + -0.013581051491200924, + -0.0005630888626910746, + 0.007590027526021004, + -0.016562901437282562, + -0.005802590399980545, + 0.005058777052909136, + -0.009642614983022213, + -0.007131681311875582, + 0.0044797747395932674, + -0.005954313091933727, + 0.005144465249031782, + -0.01214740239083767, + -0.0032915498595684767, + -0.00021614471916109324, + -0.002521343994885683, + -0.0029287212528288364, + 0.0007636473746970296, + -0.0029460457153618336 + ], + [ + -0.011385489255189896, + -0.0016915265005081892, + -0.001666159019805491, + -0.015864713117480278, + -0.00021571248362306505, + -0.0007149733137339354, + -0.008138676173985004, + -0.002941141603514552, + 0.0008013822371140122, + 0.009584633633494377, + 0.007182879839092493, + -0.0018052182858809829, + 0.008119347505271435, + 0.014607059769332409, + -0.0010591051541268826, + -0.01992860622704029, + -0.008581474423408508, + 0.00114027492236346, + 0.0008870703750289977, + 0.0003767914022319019, + 0.0006957599543966353, + 0.001548565924167633, + -0.002454499015584588, + -0.0003648319689091295, + 0.0022865652572363615, + 0.00047113915206864476, + -0.0003511659160722047, + -0.00813040230423212, + -0.0025104819796979427, + 0.0006113907438702881, + 0.00017049889720510691, + -0.0026401046197861433, + 0.0013247268507257104, + -0.00026928307488560677, + -0.0008339969790540636, + -0.0011175669496878982, + -0.0039907181635499, + 0.003607675665989518, + -0.006059848703444004, + -0.0009034615359269083, + -0.0009394243825227022, + -0.0025823896285146475, + -0.0020878673531115055, + 0.00018126489885617048, + -0.00236976589076221 + ], + [ + 0.004028908908367157, + 0.0017049856251105666, + 5.370379949454218e-05, + 0.009394863620400429, + 0.0027828046586364508, + 0.00041421252535656095, + 0.004086229484528303, + 0.0026524735148996115, + -0.0008281071786768734, + -0.012737114913761616, + 0.0015140045434236526, + 0.0029696577694267035, + -0.013778825290501118, + 0.005635555367916822, + 0.0025300418492406607, + -0.017714232206344604, + -0.01195415761321783, + 0.0011275607394054532, + 0.00019706611055880785, + -0.00029546022415161133, + -0.0010664897272363305, + 0.001903712167404592, + 0.0012171977432444692, + -0.001963118091225624, + 0.0015075091505423188, + -0.0019480364862829447, + -3.5159191611455753e-05, + 0.007157324813306332, + -0.0012632799334824085, + -0.0013232915662229061, + 0.014227595180273056, + -0.005578646436333656, + -0.002583068795502186, + 0.0010458779288455844, + 0.0015559825114905834, + -0.0011084281140938401, + 0.0011413419852033257, + -0.0017934181960299611, + 0.001995643600821495, + -0.0004406919761095196, + 0.0009208093979395926, + 0.0016424627974629402, + 
-0.0008877772488631308, + 0.0008989704656414688, + 0.0007891377899795771 + ] + ], + "right":[ + [ + -0.0065002827905118465, + 0.043443795293569565, + -0.10452164709568024, + -0.042900316417217255, + 0.011481651104986668, + -0.3076476752758026, + 0.014990712516009808, + -0.06449854373931885, + -0.11080695688724518, + 0.18507879972457886, + 0.07475540786981583, + -0.37530016899108887, + -0.016014879569411278, + 0.029576366767287254, + -0.3004041910171509, + -0.014458670280873775, + -0.032006122171878815, + -0.23033395409584045, + 0.5604633688926697, + -0.10498113185167313, + -0.5397933721542358, + -0.048614535480737686, + -0.057744402438402176, + -0.21925750374794006, + 0.21002037823200226, + 0.02164149098098278, + -0.1638900339603424, + 0.2000914216041565, + -0.07326802611351013, + -0.5309985876083374, + 0.12327712029218674, + -0.06226075068116188, + -0.22608619928359985, + 0.05398350954055786, + 0.01169112604111433, + -0.23691436648368835, + -0.12490949779748917, + 0.09014706313610077, + 0.034181635826826096, + 0.1123010590672493, + -0.1116921454668045, + -0.008618329651653767, + -0.07716584950685501, + 0.026859739795327187, + 0.040021784603595734 + ], + [ + -0.061107732355594635, + -0.028760293498635292, + -0.0852101594209671, + -0.055592115968465805, + -0.024639930576086044, + 0.13902151584625244, + 0.09642048180103302, + 0.000601884035859257, + 0.28239816427230835, + 0.04764389991760254, + -0.022113285958766937, + -0.06034102290868759, + -0.04805706813931465, + 0.05859246477484703, + 0.22738094627857208, + -0.05480566620826721, + -0.014107032679021358, + 0.2478177398443222, + 0.2947772443294525, + 0.031076615676283836, + -0.14423705637454987, + -0.43971437215805054, + 0.12156734615564346, + 0.17276380956172943, + 0.0023738087620586157, + 0.12386418879032135, + 0.3269002437591553, + 0.078597791492939, + -0.009026134386658669, + -0.14061298966407776, + -0.10282959043979645, + 0.07330198585987091, + 0.36222153902053833, + -0.04677493870258331, + 0.004285688046365976, + 0.21868270635604858, + -0.28034910559654236, + 0.18166810274124146, + -0.14600247144699097, + 0.05381167680025101, + -0.055320415645837784, + 0.21162423491477966, + -0.033887237310409546, + -0.1577225774526596, + -0.10935050249099731 + ], + [ + -0.1064336821436882, + -0.09303449094295502, + 0.3014002740383148, + 0.14722639322280884, + -0.0010746014304459095, + 0.1641043722629547, + -0.06178005784749985, + 0.05960696563124657, + -0.16366051137447357, + -0.07701944559812546, + 0.026868093758821487, + 0.4157949686050415, + 0.03778314217925072, + -0.00812435895204544, + -0.19374501705169678, + -0.002125783823430538, + -0.009603243321180344, + -0.05753939226269722, + 0.25259846448898315, + 0.1577707976102829, + -0.07957044243812561, + -0.1732443869113922, + -0.05973230302333832, + -0.1290857344865799, + 0.12797924876213074, + 0.04824599251151085, + 0.04825138673186302, + 0.0992354080080986, + 0.13283026218414307, + 0.20355944335460663, + -0.06458800286054611, + 0.06507676094770432, + -0.20480415225028992, + -0.02290191687643528, + -0.08383756130933762, + -0.04287495091557503, + -0.13255465030670166, + 0.01144093181937933, + -0.0876108929514885, + 0.004888252820819616, + 0.0881739929318428, + 0.10281384736299515, + -0.0422121025621891, + 0.0601053312420845, + -0.15104496479034424 + ], + [ + -0.02684294432401657, + 0.009411846287548542, + -0.20996080338954926, + -0.08695130795240402, + -0.051755040884017944, + -0.2733485996723175, + 0.11699758470058441, + -0.04745694622397423, + 0.028511589393019676, + 0.009152330458164215, + 
-0.04008851200342178, + -0.006335575599223375, + 0.024406949058175087, + 0.0012957986909896135, + -0.11565860360860825, + -0.00551746366545558, + -0.02275954745709896, + -0.005480325315147638, + -0.0604497529566288, + 0.0009865796891972423, + 0.17491832375526428, + 0.09513134509325027, + 0.0022878695745021105, + -0.04317467287182808, + -0.042537592351436615, + -0.01697908714413643, + -0.053470633924007416, + -0.01982058584690094, + -0.009610535576939583, + 0.13786230981349945, + 0.002854656893759966, + 0.008014212362468243, + -0.07683318108320236, + 0.01611691154539585, + -0.01680651679635048, + -0.025259772315621376, + -0.24394449591636658, + 0.09612732380628586, + -0.1807442307472229, + -0.022753261029720306, + -0.01281095203012228, + 0.2177630364894867, + -0.10621364414691925, + 0.0392993800342083, + -0.28347688913345337 + ], + [ + 0.016318397596478462, + 0.03648126870393753, + -0.05039751157164574, + -0.025695865973830223, + 0.00026572769274935126, + -0.21002613008022308, + 0.0035516363568603992, + -0.04446924105286598, + -0.14934365451335907, + -0.04689402133226395, + 0.013387766666710377, + 0.016596781089901924, + 0.015022682957351208, + -0.027603743597865105, + 0.011148272082209587, + 0.013118156231939793, + -0.0022309001069515944, + -0.07036174833774567, + 0.02704538218677044, + 0.07914850860834122, + -0.049424465745687485, + -0.15908393263816833, + -0.004625978413969278, + 0.11610068380832672, + 0.016215333715081215, + 0.014706678688526154, + 0.08644839376211166, + -0.04509324952960014, + 0.032722990959882736, + 0.08445950597524643, + -0.018176842480897903, + 0.008074218407273293, + 0.15422053635120392, + 0.014123921282589436, + 0.006887519732117653, + -0.028760502114892006, + 0.062480904161930084, + -0.10739129781723022, + 0.025675108656287193, + -0.02488124929368496, + -0.009264294058084488, + -0.09507975727319717, + -0.060702573508024216, + 0.16679802536964417, + -0.01834101229906082 + ], + [ + -0.006860456429421902, + -0.03430275619029999, + -0.10541100054979324, + -0.02925351820886135, + 0.008854901418089867, + 0.11257459968328476, + 0.027404753491282463, + -0.005477671977132559, + 0.08789193630218506, + -0.057081662118434906, + -0.023918181657791138, + 0.02883325144648552, + -0.0057859825901687145, + -0.027394162490963936, + 0.20066726207733154, + 0.017585940659046173, + 0.004243167582899332, + 0.01484750583767891, + 0.06808744370937347, + -0.011725028976798058, + -0.0977899506688118, + 0.08463256806135178, + -0.060994572937488556, + -0.06084435060620308, + 0.007171604782342911, + -0.027576690539717674, + -0.05295417457818985, + 0.017242006957530975, + 0.02075306512415409, + 0.031001586467027664, + -0.004191653337329626, + -0.009333438239991665, + -0.02929551713168621, + 0.011278285644948483, + -0.010517350398004055, + -0.0384749099612236, + -0.020004993304610252, + -0.07694586366415024, + -0.019894175231456757, + -0.025658152997493744, + -0.031197229400277138, + -0.06406533718109131, + -0.09038729965686798, + 0.159688338637352, + -0.07271291315555573 + ], + [ + -0.003293597837910056, + -0.020608481019735336, + -0.17736975848674774, + -0.06540100276470184, + -0.013043813407421112, + 0.006177311297506094, + 0.045163240283727646, + -0.012492994777858257, + 0.0917729064822197, + -0.05963604897260666, + -0.032804813235998154, + 0.018690932542085648, + 0.024753771722316742, + -0.01309316884726286, + -0.052925147116184235, + 0.009612965397536755, + -0.014142473228275776, + -0.030030885711312294, + 0.03968771547079086, + 0.056855760514736176, + 0.024758173152804375, + 
-0.06976953893899918, + -0.032744698226451874, + -0.07340559363365173, + 0.08997787535190582, + 0.033744510263204575, + 0.03684071823954582, + -0.0005191991222091019, + 0.02859567478299141, + 0.13013561069965363, + -0.012812117114663124, + 0.00714074308052659, + -0.056659843772649765, + 0.007996794767677784, + -0.007119663525372744, + -0.037621308118104935, + 0.049198396503925323, + 0.0010156190255656838, + 0.024883389472961426, + 0.028158826753497124, + -0.061080921441316605, + -0.034638140350580215, + 0.07525738328695297, + -0.10333201289176941, + 0.1325017213821411 + ], + [ + -0.022495703771710396, + 0.016010025516152382, + 0.024266116321086884, + -0.003703402355313301, + -0.017527369782328606, + -0.159004807472229, + 0.016453547403216362, + -0.012924930080771446, + -0.0709749087691307, + -0.04518720507621765, + -0.022061670199036598, + 0.12056085467338562, + -0.0045554740354418755, + 0.00913986749947071, + 0.17876332998275757, + 0.0009227339178323746, + 0.01741109788417816, + 0.006726282648742199, + 0.08306974172592163, + -0.09147880971431732, + -0.05467434227466583, + 0.06329692900180817, + 0.004105379339307547, + -0.0337633453309536, + -0.049023646861314774, + 0.007342121563851833, + -0.024658430367708206, + 0.02406417578458786, + -0.014786751009523869, + 0.03931219130754471, + -0.01033793855458498, + 0.006478187628090382, + 0.0034783596638590097, + -0.0103599913418293, + 0.01392274722456932, + 0.005495754536241293, + 0.035516753792762756, + 0.045847903937101364, + 0.014752183109521866, + 0.00818663090467453, + 0.029752077534794807, + 0.044768448919057846, + 0.071299247443676, + -0.11924222111701965, + 0.03440502658486366 + ], + [ + 0.01037701778113842, + -0.026998430490493774, + -0.06586451083421707, + -0.015211832709610462, + 0.033266182988882065, + 0.0795116201043129, + 0.023093054071068764, + -0.01445830799639225, + -0.011426141485571861, + -0.023018935695290565, + 0.0039683012291789055, + -0.0024486822076141834, + 0.032019659876823425, + -0.0014701877953484654, + -0.002281958470121026, + -0.012504305690526962, + 0.004029049072414637, + -0.10711884498596191, + 0.01753457635641098, + 0.013889643363654613, + -0.05143135413527489, + -0.03475813940167427, + 0.0328337736427784, + 0.13636285066604614, + -0.10292953997850418, + -0.042261753231287, + -0.030638914555311203, + -0.012393152341246605, + 0.0007874646107666194, + -0.002357605379074812, + 0.022026734426617622, + -0.012309701181948185, + -0.03602808713912964, + -0.005990192759782076, + -0.01528380811214447, + -0.08939861506223679, + 0.024183819070458412, + 0.0006722258403897285, + 0.006495789159089327, + -0.04842047020792961, + 0.06676465272903442, + 0.04371637850999832, + 0.07582321017980576, + -0.05618206411600113, + -0.0683959349989891 + ], + [ + 0.0027228104881942272, + -0.007558085024356842, + 0.01986050419509411, + -0.0036453227512538433, + 0.011269617825746536, + 0.039507731795310974, + 0.014368223026394844, + 0.001139448257163167, + 0.008862998336553574, + -0.0046951002441346645, + 0.01741824671626091, + -0.007818960584700108, + 0.01849675364792347, + 0.01158775482326746, + -0.04309699311852455, + -0.013079572468996048, + -0.0006091155228205025, + -0.03756615146994591, + 0.028269313275814056, + 0.011830338276922703, + -0.0026700987946242094, + 0.043102916330099106, + -0.03120562434196472, + -0.09325245022773743, + 0.04077508673071861, + -0.022158967331051826, + -0.09522164613008499, + -0.04337034747004509, + 0.004554356448352337, + 0.02630382776260376, + -0.026951588690280914, + 0.023328863084316254, + 
0.22439415752887726, + 0.00400937395170331, + 0.012677961029112339, + -0.06592120975255966, + 0.010069012641906738, + 0.02721044234931469, + 0.009045140817761421, + -0.017200231552124023, + 0.017642805352807045, + 0.04072513058781624, + 0.04390592873096466, + -0.010296802967786789, + -0.026480067521333694 + ], + [ + -0.0196752417832613, + -0.03059542551636696, + 0.004071312490850687, + 0.012630959041416645, + 0.012701338157057762, + 0.016266096383333206, + 0.011249222792685032, + -0.020901933312416077, + -0.048582132905721664, + 0.002174713183194399, + 0.037237558513879776, + 0.05071381852030754, + 0.009539234451949596, + -0.011311905458569527, + 0.010159273631870747, + 0.002807020675390959, + 0.0008882044930942357, + -0.022457411512732506, + -0.027037745341658592, + 0.024448513984680176, + -0.003671618876978755, + 0.02926153689622879, + 0.01011715643107891, + 0.04733146354556084, + -0.023861907422542572, + -0.04451662302017212, + -0.02474619448184967, + 0.006603296846151352, + 0.03811298683285713, + -0.026141753420233727, + 0.012986866757273674, + -0.015027341432869434, + 0.02867872081696987, + 0.012971485033631325, + 0.002738230861723423, + -0.016987551003694534, + -0.0756663978099823, + -0.03939340263605118, + -0.04692529886960983, + 0.06134684756398201, + -0.18201510608196259, + 0.019963862374424934, + 0.024278445169329643, + -0.0169772207736969, + 0.05542279779911041 + ], + [ + 0.008455779403448105, + 0.03601161018013954, + 0.03855350241065025, + 0.0034931846894323826, + -0.005980742163956165, + 0.016562286764383316, + -0.016562722623348236, + -0.012683814391493797, + -0.020969104021787643, + 0.015351788140833378, + 0.007111527491360903, + -0.04647205024957657, + -0.03062833473086357, + 0.008911609649658203, + 0.11017579585313797, + -0.006154295057058334, + -0.016679061576724052, + -0.0403323695063591, + -0.0021834878716617823, + 0.009101547300815582, + 0.08985194563865662, + -0.0793597400188446, + 0.052204471081495285, + 0.0013952577719464898, + 0.07903958112001419, + 0.030070308595895767, + -0.06708218157291412, + -0.009248117916285992, + -0.005016601178795099, + 0.01787707768380642, + 0.01874777115881443, + -0.007951648905873299, + -0.03321433439850807, + 0.02069927006959915, + 0.005988914053887129, + -0.07946880906820297, + -0.022785484790802002, + 0.03427620604634285, + 0.006891249213367701, + 0.009044891223311424, + -0.01240516360849142, + 0.020538274198770523, + -0.0030606375075876713, + 0.011045640334486961, + 0.004303240682929754 + ], + [ + 0.0036664451472461224, + -0.021467870101332664, + 0.00020952789054717869, + 0.0065444465726614, + 0.008865677751600742, + -0.04278237372636795, + 0.013052728958427906, + 0.03891315683722496, + -0.00607796898111701, + 0.014812156558036804, + -0.010518539696931839, + -0.01711435057222843, + 0.03571588173508644, + 0.036704450845718384, + 0.041875384747982025, + -0.0307945404201746, + 0.029675960540771484, + 0.054111041128635406, + -0.03833693265914917, + 0.08324743807315826, + 0.00279268273152411, + -0.043840985745191574, + -0.06718762218952179, + -0.0344025082886219, + 0.01800568960607052, + -0.03542892634868622, + -0.01444207038730383, + 0.019021261483430862, + 0.023896971717476845, + -0.05687994137406349, + 0.010798582807183266, + 0.0038682506419718266, + -0.025353148579597473, + -0.05920318886637688, + -0.03211173415184021, + 0.0004010381526313722, + 0.037328433245420456, + -0.012941398657858372, + -0.00783421192318201, + -0.04482981190085411, + -0.02423698641359806, + 0.03563130274415016, + 0.09118549525737762, + 0.02624489739537239, + 
-0.03152075782418251 + ], + [ + 0.0007554808398708701, + -0.05242738872766495, + 0.0014067788142710924, + 0.011390752159059048, + 0.021937567740678787, + -0.004478380084037781, + -0.016847755759954453, + -0.0027125380001962185, + -0.05561840906739235, + -0.010386303998529911, + -0.014461257494986057, + -0.07208764553070068, + -0.012688713148236275, + -0.018304653465747833, + 0.04230412840843201, + 0.025743871927261353, + -6.573282007593662e-05, + -0.005349533632397652, + -0.018145034089684486, + 0.09586136043071747, + -0.028106361627578735, + 0.036183856427669525, + -0.03890823945403099, + -0.007154957391321659, + 0.02438383176922798, + 0.006218439899384975, + 0.012302412651479244, + -0.034468039870262146, + 0.028795529156923294, + 0.008399764075875282, + -0.004131385125219822, + -0.017562665045261383, + -0.00013936882896814495, + 0.03910807520151138, + 6.251157174119726e-05, + 0.025713898241519928, + -0.06022397056221962, + -0.01746310293674469, + -0.01646556332707405, + 0.018420696258544922, + 0.06146717816591263, + -0.026877349242568016, + -0.023622363805770874, + -0.08989278972148895, + -0.0073748305439949036 + ], + [ + 0.030542856082320213, + 0.008811701089143753, + 0.0523078478872776, + 0.0067365653812885284, + 0.027505842968821526, + 0.0047425576485693455, + 0.010683344677090645, + 0.014453236944973469, + -0.006321418564766645, + 0.029211604967713356, + -0.0232774056494236, + -0.07627473026514053, + 0.0030010107439011335, + 0.012009093537926674, + 0.00051021424587816, + -0.006279981695115566, + 0.02611403726041317, + 0.011486154980957508, + 0.03012763150036335, + -0.03562299162149429, + -0.019505254924297333, + -0.00682585034519434, + -0.03566821664571762, + -0.003574620932340622, + -0.04172534495592117, + -0.023055287078022957, + 0.028453411534428596, + -0.02836139313876629, + -0.03357318416237831, + 0.09116940945386887, + -0.015319928526878357, + 0.02438412979245186, + -0.016869012266397476, + 0.005463107023388147, + -0.037935856729745865, + -0.020401470363140106, + -0.004871362820267677, + -0.00804493110626936, + -0.019882427528500557, + 0.0015647263498976827, + -0.04529079794883728, + 0.004697592929005623, + 0.0108138807117939, + 0.0012828774051740766, + 0.006596735212951899 + ], + [ + 0.005858828779309988, + -0.016593964770436287, + -0.030293909832835197, + -0.0017675489652901888, + 0.003695212071761489, + 0.04358220472931862, + -0.009942498058080673, + -0.022219518199563026, + -0.07844208180904388, + -0.010169927962124348, + -0.016561392694711685, + -0.024813547730445862, + 0.0012255803449079394, + -0.013036666437983513, + 0.000728949555195868, + 0.006982212420552969, + -0.007227342575788498, + -0.01262681558728218, + 0.0005772245931439102, + -0.0446879118680954, + 0.009557729586958885, + 0.017626583576202393, + 0.005579731427133083, + -0.0305457916110754, + 0.01703699305653572, + 0.07850301265716553, + 0.04634169489145279, + -0.006922584027051926, + -0.0015523477923125029, + -0.009070397354662418, + -0.0015999304596334696, + -0.005408989265561104, + -0.0021382628474384546, + -0.002667429158464074, + 0.028962355107069016, + 0.029028646647930145, + 0.011584458872675896, + -0.01618047058582306, + -0.007066205609589815, + -0.027669474482536316, + -0.030012264847755432, + 0.03296054154634476, + 0.05482836067676544, + 0.019482742995023727, + -0.033676858991384506 + ], + [ + -0.0021477786358445883, + -0.01811544969677925, + 0.00822137575596571, + 0.0008912881603464484, + 0.0048518371768295765, + 0.00026707572396844625, + 0.01708621345460415, + 0.0006847226759418845, + 
0.00736237084493041, + 0.0144194345921278, + -0.024503272026777267, + 0.03498969227075577, + 0.001738202292472124, + 0.00010192584159085527, + 0.014449493028223515, + 0.001262195990420878, + -0.0012137452140450478, + -0.05094340071082115, + -0.06767641752958298, + -0.05372973531484604, + -0.012530145235359669, + -0.020315852016210556, + -0.050124071538448334, + -0.011806163936853409, + 0.046647876501083374, + -0.023046184331178665, + 0.06621181219816208, + 0.003629519371315837, + -0.036956630647182465, + -0.026524772867560387, + 0.016542410477995872, + -0.006295610219240189, + 0.013811145909130573, + -0.00569135183468461, + -0.015433442778885365, + -0.045816875994205475, + -0.011365693993866444, + 4.4254604290472344e-05, + -0.018294615671038628, + -0.0010185466380789876, + 0.008983682841062546, + -0.004091570619493723, + -0.023012524470686913, + -0.022986240684986115, + -0.006337582133710384 + ], + [ + -0.006680434104055166, + 0.0781208947300911, + -0.03560307249426842, + 0.01747014746069908, + 0.0038246747571974993, + 0.01575591415166855, + -0.016310250386595726, + 0.017044968903064728, + -0.020810458809137344, + -0.017268521711230278, + 0.050511594861745834, + -0.008982710540294647, + 0.021877462044358253, + -0.006228135898709297, + 0.0035072253085672855, + 0.0060678282752633095, + 0.037531591951847076, + -0.0007317958516068757, + -0.0026024957187473774, + -0.03375082463026047, + 0.016674943268299103, + -0.025238024070858955, + -0.012202970683574677, + -0.02889031358063221, + -0.011467299424111843, + -0.018403641879558563, + -0.0008077493403106928, + 0.008094809018075466, + 0.01859569363296032, + -0.004368297290056944, + -0.002272853162139654, + 0.007889257743954659, + 0.003111835103482008, + -0.02613692171871662, + -0.00930595863610506, + 0.005887841805815697, + -0.037920571863651276, + -0.033110376447439194, + -0.014921287074685097, + 0.022004052996635437, + 0.025661425665020943, + -0.025482140481472015, + 0.011318915523588657, + -0.02024848572909832, + -0.00804445892572403 + ], + [ + -0.0023323947098106146, + -0.03986965864896774, + -0.023009436205029488, + -0.029371894896030426, + 0.026156244799494743, + 0.025455931201577187, + 0.03612012043595314, + -0.02219763956964016, + -0.06387332826852798, + 0.021117234602570534, + -0.01819853112101555, + 0.005769097246229649, + -0.0005661676987074316, + 0.02720893733203411, + -0.0016807635547593236, + -0.013957783579826355, + 0.008374016731977463, + 0.015384451486170292, + 0.01985081098973751, + -0.010488774627447128, + 0.04797288402915001, + -0.01961948722600937, + -0.013332879170775414, + -0.012228702194988728, + -0.026425980031490326, + -0.01883620396256447, + -0.018522778525948524, + 0.029070226475596428, + -0.0035870876163244247, + -0.01141587644815445, + -0.005484623368829489, + 0.009538937360048294, + 0.007844988256692886, + -0.014600615948438644, + -0.015341932885348797, + 0.008447336032986641, + 0.006709011737257242, + 0.022441094741225243, + 0.002629294293001294, + 0.006978550925850868, + 0.015776541084051132, + -0.026218807324767113, + -0.03923872485756874, + 0.0001955090556293726, + 0.023533381521701813 + ], + [ + -0.007703295908868313, + 0.008345683105289936, + -0.010234742425382137, + 0.016737811267375946, + -0.011508629657328129, + 0.008886181749403477, + -0.015952015295624733, + 0.013445794582366943, + -0.004053363110870123, + -0.022865070030093193, + -0.0037585790269076824, + -0.014152067713439465, + 0.010187502950429916, + -0.018974917009472847, + 0.013702861033380032, + 0.00576101103797555, + 0.00015525757044088095, + 
-0.05282927304506302, + -0.003934428095817566, + 0.018432650715112686, + 0.0037050058599561453, + -0.005815625190734863, + -0.03881102427840233, + -0.008209442719817162, + -0.02596147544682026, + -0.017851268872618675, + 0.010695546865463257, + -0.0007560423691757023, + -0.01206777524203062, + 0.0008125992026180029, + -0.006552835926413536, + -0.013022815808653831, + -0.0004488170670811087, + -0.008348164148628712, + -0.00013128247519489378, + 0.03856707736849785, + 0.009184446185827255, + 0.05820004269480705, + 0.037482671439647675, + 0.04758414253592491, + -0.006201918702572584, + 0.04102705419063568, + -0.012194694951176643, + 0.03271199390292168, + 0.010727118700742722 + ], + [ + 0.008452201262116432, + -0.036472272127866745, + 0.020320884883403778, + -0.003595655784010887, + 0.0016662892885506153, + -0.022026635706424713, + -0.017016971483826637, + 0.030711114406585693, + 0.031180579215288162, + -0.0034158097114413977, + -0.016970504075288773, + -0.007747208699584007, + 0.012479030527174473, + -0.02783718705177307, + 0.003990172874182463, + 0.019525159150362015, + 0.00537873525172472, + -0.03924533724784851, + 0.03242785483598709, + -0.0014453479088842869, + 0.05218246579170227, + 0.006417648401111364, + -0.003966373857110739, + 0.0003704492119140923, + -0.0263835396617651, + 0.022614246234297752, + 0.006226354744285345, + 0.03456621617078781, + -0.0012203565565869212, + -0.015491430647671223, + -0.004601797088980675, + -0.003556341864168644, + 0.013310059905052185, + -0.015447961166501045, + -0.0028846205677837133, + -0.006352880969643593, + -0.02240452729165554, + -0.017737651243805885, + -0.009904802776873112, + -0.005678647663444281, + 0.0023511068429797888, + -0.03184409812092781, + 0.02212630957365036, + 0.005479009822010994, + 0.0005827452405355871 + ], + [ + 0.013699429109692574, + -0.0033424491994082928, + 0.02516511268913746, + -0.024059422314167023, + 0.011336086317896843, + 0.001995411003008485, + 0.032384857535362244, + -0.025773167610168457, + 0.006303270813077688, + -0.009568688459694386, + 0.028605926781892776, + 0.008808879181742668, + 0.008818162605166435, + -0.02372622862458229, + -0.0014211301458999515, + 0.009698646143078804, + 0.01846197247505188, + 0.01695738360285759, + 0.01138181984424591, + 0.012916128151118755, + 0.005945347249507904, + -0.01252772193402052, + -0.0033041308633983135, + -0.021015100181102753, + -0.012405123561620712, + -0.009321194142103195, + 0.014053457416594028, + -0.02565625123679638, + -0.025283608585596085, + 0.0008273700950667262, + 0.03841831535100937, + -0.04461786523461342, + -0.006019273307174444, + -0.00516040762886405, + 0.040852297097444534, + -0.0024584317579865456, + -0.010626666247844696, + 0.007327461149543524, + 0.0016056908061727881, + 0.003469041781499982, + 0.007411503698676825, + -0.0075955018401145935, + 0.01313801109790802, + 0.008599716238677502, + -0.0018994261045008898 + ], + [ + 0.0005498353275470436, + 0.0319436676800251, + 0.018350139260292053, + -0.017125563696026802, + 0.0034153282176703215, + 0.004779968410730362, + 0.045640673488378525, + -0.0047793020494282246, + 0.006410537287592888, + 0.015605989843606949, + 0.012619836255908012, + -0.0014300135662779212, + -0.006947851274162531, + 0.008033467456698418, + 0.011409820057451725, + -0.003566782223060727, + -0.004699454642832279, + -0.05236497148871422, + 0.0046944874338805676, + 0.025257842615246773, + 0.002994734328240156, + 0.015204668045043945, + 0.021089112386107445, + -0.02023189887404442, + -0.0010332213714718819, + -0.00785747915506363, + 
0.017137425020337105, + 0.004313879180699587, + 0.010306312702596188, + -0.00681322580203414, + -0.0034331935457885265, + 0.017821529880166054, + -0.004885640926659107, + 0.002734225010499358, + -0.009505352936685085, + 0.035248950123786926, + 0.015221849083900452, + -0.009780111722648144, + -0.017998438328504562, + -0.024638639762997627, + -0.004241432528942823, + -0.0002386040287092328, + -0.006098208483308554, + -0.005580107681453228, + 0.0033594027627259493 + ], + [ + 0.011463717557489872, + -0.018766270950436592, + -8.590449579060078e-05, + 0.008514202199876308, + 0.0025223407428711653, + -0.004520431160926819, + -0.004915399011224508, + 0.0035596643574535847, + 0.004628830123692751, + -0.005701262969523668, + -0.015460366383194923, + 0.002019961131736636, + 0.002108558313921094, + -0.004662561230361462, + -0.005207268986850977, + 0.00492060324177146, + 0.0009433203376829624, + -0.01866292394697666, + 0.003009023144841194, + -0.02914227545261383, + -0.007199206855148077, + -0.030993036925792694, + -0.0009944384219124913, + 0.007819466292858124, + 0.02571331523358822, + -0.027489913627505302, + -0.0458693727850914, + -0.019027728587388992, + -0.008148781023919582, + 0.008761102333664894, + 0.001935663167387247, + -0.011475535109639168, + -0.004167452920228243, + -0.007355986628681421, + 0.012524465098977089, + 0.059944238513708115, + -0.0058122919872403145, + -0.008124226704239845, + -0.010307827964425087, + -0.023800605908036232, + -0.009315493516623974, + -0.00832737609744072, + -4.363518382888287e-05, + -0.00952251348644495, + -0.00479150889441371 + ], + [ + 0.007256949786096811, + -0.03601238131523132, + -0.008004694245755672, + 0.00789707712829113, + 0.02580997906625271, + -0.01090193074196577, + 0.0025381019804626703, + 0.03368000686168671, + -0.0003010774089489132, + -0.0074155586771667, + 0.023931991308927536, + -0.0072115082293748856, + 0.008573928847908974, + 0.015263441950082779, + 0.005148470867425203, + -0.015294834040105343, + 0.0012072762474417686, + -0.005141685716807842, + -0.0033261156640946865, + -0.01706104539334774, + -0.004741530865430832, + 0.007806902285665274, + 0.04825945943593979, + -0.01429178286343813, + 0.011948144994676113, + -0.018770279362797737, + 0.02372652105987072, + -0.01763378269970417, + 0.028210734948515892, + 0.006298302207142115, + 0.0014280491741374135, + -0.003089534817263484, + -0.0033092449884861708, + -0.0066254884004592896, + -0.002827590797096491, + -0.0018842555582523346, + -0.001385998446494341, + 0.021954011172056198, + 0.005023208912461996, + 0.013155384920537472, + 0.004307809751480818, + -0.009782231412827969, + 0.003535080701112747, + 0.013928093016147614, + -0.0024879719130694866 + ], + [ + 0.003366141114383936, + -0.001687322393991053, + 0.01570109836757183, + -0.015234448947012424, + -0.0031824191100895405, + 0.00045815485646016896, + 0.037793535739183426, + 0.005354998167604208, + -0.0071918657049536705, + -0.023681409657001495, + 0.010527492500841618, + -0.011286735534667969, + 0.012779594399034977, + -0.05186581239104271, + -0.002791260601952672, + 0.016553761437535286, + -0.005137853790074587, + 0.018263358622789383, + -0.006586691364645958, + -0.018161306157708168, + 0.0006730396416969597, + -0.0024377235677093267, + -0.0020296117290854454, + 0.01403796300292015, + 0.0181891992688179, + -0.002181856893002987, + -0.009124440141022205, + 0.008268313482403755, + 0.014520031400024891, + -0.011442477814853191, + -0.020665081217885017, + 0.004358141217380762, + -0.002436751965433359, + 0.019096149131655693, + 
-0.036353468894958496, + 0.0007185546564869583, + 0.009594779461622238, + 0.010843418538570404, + 0.004595187492668629, + 0.0050412374548614025, + 0.001194936572574079, + 0.003474273718893528, + 0.005747854709625244, + 0.0004006402159575373, + -0.005848734173923731 + ], + [ + 0.005553645547479391, + 0.014829201623797417, + -0.0030189836397767067, + 0.0019321298459544778, + -0.017568906769156456, + 0.00797270517796278, + 0.005635120905935764, + 0.01206644345074892, + -0.015796935185790062, + -0.018575400114059448, + -0.04890725389122963, + 0.0016979356296360493, + 0.003093397943302989, + 3.803123036050238e-05, + -0.005210451781749725, + 0.01655028946697712, + 0.014960705302655697, + 0.0072961971163749695, + 0.006164019927382469, + 0.016866367310285568, + -9.394375956617296e-05, + 0.00788196176290512, + 0.03728523850440979, + -0.004866194445639849, + 0.011433728970587254, + -0.030994391068816185, + 0.008317786268889904, + -0.0005908770835958421, + -0.028398267924785614, + -0.010313336737453938, + -0.004636738914996386, + 0.006355722900480032, + 0.0020106921438127756, + -0.0023178497795015574, + 0.002649511443451047, + -0.007435911800712347, + -0.007661172188818455, + -0.0035062299575656652, + -0.0012476628180593252, + 0.009976162575185299, + 0.0006027839845046401, + -0.00467014592140913, + 0.009400233626365662, + 0.005134325008839369, + -0.0012779058888554573 + ], + [ + 0.016268962994217873, + 0.0030379893723875284, + -0.0016002239426597953, + -0.0017761185299605131, + -0.011487155221402645, + 0.0018621287308633327, + -0.007008627522736788, + -0.025422925129532814, + -0.0014716434525325894, + 0.013461830094456673, + 0.0006149964756332338, + -0.0025843719486147165, + -0.005991258658468723, + -0.00429184827953577, + 0.002218527952209115, + -0.01678687147796154, + -0.03942489251494408, + 0.0030056287068873644, + 0.010203512385487556, + -0.004870238713920116, + 0.003717053448781371, + 0.01388748362660408, + -0.004829874727874994, + 0.009799350053071976, + 0.015065329149365425, + -0.028329303488135338, + 0.011301170103251934, + 0.01473788172006607, + 0.005888659041374922, + 0.01177951693534851, + 0.008465822786092758, + -0.009206120856106281, + 0.003913552034646273, + -0.00923006422817707, + -0.014665525406599045, + 0.008997058495879173, + -0.0203713309019804, + -0.007739989552646875, + -0.0003181488427799195, + 0.006230671890079975, + 0.024753211066126823, + 0.005024408455938101, + 0.030169211328029633, + 0.018536392599344254, + 0.01616176776587963 + ], + [ + -0.01910393126308918, + -0.00871589407324791, + 0.0007106777629815042, + 0.011063016019761562, + -0.014131869189441204, + -0.0008084847358986735, + -0.015428530983626842, + -0.013651038520038128, + 0.003771252231672406, + -0.0029813917353749275, + 0.0006631190772168338, + -0.01307733729481697, + 0.0011754566803574562, + -0.003673878964036703, + 0.0005424732225947082, + 0.012685926631093025, + 0.01106881070882082, + 0.004886738024652004, + 0.018098043277859688, + -0.009469973854720592, + 0.01951996050775051, + -0.0028987592086195946, + 0.00014805985847488046, + -0.005811454262584448, + -0.0007206271402537823, + -0.03397194668650627, + 0.017788013443350792, + 0.0011831517331302166, + 0.024638516828417778, + -0.005374486092478037, + 0.00321221468038857, + -0.011132437735795975, + 0.000902124447748065, + 0.0193793922662735, + 0.008925841189920902, + -0.008256366476416588, + 0.024065475910902023, + -0.011761298403143883, + -0.009709605947136879, + -0.017639119178056717, + 0.0005737878382205963, + 0.02368728071451187, + -0.014572097919881344, + 
-0.005914192646741867, + 0.0058142333291471004 + ], + [ + 0.006649153307080269, + -0.011156578548252583, + 0.008667523972690105, + -0.0024249095004051924, + -0.01574135012924671, + -0.0011882235994562507, + 0.0017571536591276526, + -0.010584973730146885, + 0.004354995675384998, + -0.0049141691997647285, + -0.0049272626638412476, + -0.005947389639914036, + 0.00042120664147660136, + -0.0038913593161851168, + 0.0007645332952961326, + -0.010488866828382015, + -0.022886788472533226, + -0.001011396641843021, + -0.0007216680096462369, + -0.0016741305589675903, + 0.0006031721131876111, + -0.010788626037538052, + 0.011402606032788754, + -0.01740707829594612, + -0.011967460624873638, + 0.0019294061930850148, + -0.004990001674741507, + -0.00919059943407774, + 0.0014225010527297854, + -0.0027023048605769873, + 0.00430525466799736, + -0.0008500043186359107, + -0.00021649774862453341, + -0.02432853728532791, + -0.008758123964071274, + -0.00063912762561813, + 0.021570000797510147, + -0.031498074531555176, + 0.0023175596725195646, + 0.03836365416646004, + -0.004075958859175444, + 0.0053198630921542645, + -0.012724303640425205, + -0.016394853591918945, + -0.023500945419073105 + ], + [ + 0.025469960644841194, + -0.002241270150989294, + -0.0004540280206128955, + 0.0009244878892786801, + -0.00957506150007248, + -0.0016278807306662202, + -0.013423891738057137, + -0.017305035144090652, + 0.0015235238242894411, + -0.01020258292555809, + -0.007669116836041212, + 0.0051340158097445965, + -0.0010782890021800995, + -0.0064316983334720135, + -0.0004841153568122536, + -0.0019476833986118436, + 0.004271430429071188, + -0.0017103132558986545, + -0.013322128914296627, + 0.0011082219425588846, + -0.011887851171195507, + -0.01331858430057764, + 0.013245712965726852, + -0.024600394070148468, + -0.02362148091197014, + 0.009075438603758812, + -0.0033172816038131714, + -0.013799937441945076, + 0.009887240827083588, + -0.010585967451334, + -0.01044811587780714, + -0.009472894482314587, + 0.0005284295184537768, + -0.001361248199827969, + -0.02594010718166828, + -0.005964653566479683, + -0.014829547144472599, + 0.007003128994256258, + -0.007044685073196888, + -0.022479651495814323, + 0.0024220854975283146, + 0.006172834429889917, + -0.006807849742472172, + 0.0051607307977974415, + 0.02364627830684185 + ], + [ + -0.01351979747414589, + -0.01071852445602417, + 0.012320837005972862, + -0.016693919897079468, + -0.006019806023687124, + -0.0020079852547496557, + -0.0005733276484534144, + -0.013488642871379852, + 0.0022639636881649494, + -0.015679996460676193, + 0.0021946877241134644, + -0.014166414737701416, + -0.011975525878369808, + -0.0002792203158605844, + -0.0011187032796442509, + 0.004500983282923698, + -0.0045900000259280205, + -0.0030179214663803577, + -0.01949739269912243, + -0.005226042587310076, + -0.01172392163425684, + -0.010197860188782215, + -0.00011359209747752175, + -0.00680533517152071, + -0.008970695547759533, + -0.011310221627354622, + -0.0019072643481194973, + 0.012785663828253746, + 0.009152069687843323, + 0.003070997539907694, + -0.016211656853556633, + 0.034127477556467056, + -0.007422042544931173, + -0.010613166727125645, + 0.03430658206343651, + -0.005655908957123756, + -0.01145249791443348, + 0.0023543487768620253, + 0.0009709355654194951, + -0.0010431264527142048, + 0.004303919151425362, + -0.003024874720722437, + 0.008601179346442223, + 0.006732244975864887, + 0.00464903749525547 + ], + [ + -0.0018222297076135874, + 0.008018524385988712, + -0.0018628957914188504, + 0.0042664408683776855, + 
-0.006818541791290045, + 0.0037505687214434147, + -0.002596283331513405, + 0.021230431273579597, + -0.012844810262322426, + -0.009535901248455048, + 0.007141975220292807, + -0.0073865195736289024, + 0.005147598218172789, + -0.018142197281122208, + 0.0008456947398371994, + -0.014044527895748615, + -0.03152196854352951, + 0.00707216328009963, + 0.0026926815044134855, + 0.006269850768148899, + -0.0017533364007249475, + 0.0038201462011784315, + -0.00037060832255519927, + -0.00010697160905692726, + -0.0029500171076506376, + -0.00594440707936883, + 0.0039031344931572676, + 0.00724786426872015, + -0.009470655582845211, + 0.0054483539424836636, + -0.00194179720710963, + -0.010519498027861118, + 0.001662794267758727, + -0.02456548810005188, + 0.012444569729268551, + -0.010108706541359425, + 0.004587135743349791, + 0.013706480152904987, + -0.01263850275427103, + -0.026247264817357063, + -0.011768225580453873, + -0.007999289780855179, + -0.017324434593319893, + -0.012651980854570866, + 0.003926132805645466 + ], + [ + -0.020188461989164352, + 0.006698640063405037, + 0.004112639930099249, + 0.0007280276040546596, + 0.002634846605360508, + 0.0005515202647075057, + 0.02123960293829441, + 0.019464543089270592, + -0.0001587762963026762, + -0.008599312044680119, + -0.018271788954734802, + -0.005116484593600035, + -0.011170112527906895, + 0.008449687622487545, + -0.004154965281486511, + -0.004302148707211018, + -0.0056588854640722275, + -0.0011641217861324549, + -0.0007424535579048097, + -0.0075840638019144535, + -0.0027921327855437994, + -0.002816914115101099, + -0.006281680427491665, + 0.005265111103653908, + -0.004990534391254187, + 0.012470233254134655, + -0.007389815989881754, + -0.013034018687903881, + 0.014276803471148014, + -0.0028325908351689577, + 0.00015733606414869428, + -0.02050873078405857, + 0.000681033416185528, + -0.004920493811368942, + 0.006407404784113169, + -0.0035581106785684824, + -5.272514317766763e-05, + -0.00918152742087841, + -0.02351958677172661, + 0.01028912141919136, + 0.01544911041855812, + 0.009533251635730267, + 0.003677165601402521, + 0.012997119687497616, + 0.021199379116296768 + ], + [ + -0.004125118721276522, + -0.002716755261644721, + 0.002079974627122283, + -0.012317268177866936, + 0.01793847046792507, + -0.001064602518454194, + -0.011365224607288837, + -0.009342136792838573, + 0.002003356348723173, + -0.012669265270233154, + 0.008233653381466866, + -0.004353824071586132, + -0.0006793754873797297, + 0.00042605173075571656, + -0.0007317265844903886, + 0.018481364473700523, + 0.011487459763884544, + -1.1120008821308147e-05, + 0.00423803785815835, + 0.0026452094316482544, + -0.00037203254760243, + 0.01218781154602766, + 0.0036872359924018383, + 0.012102054432034492, + 0.016199873760342598, + 0.005179674830287695, + -0.0008002324029803276, + -0.0035961114335805178, + -0.008391482755541801, + 0.0016886505763977766, + 0.010650482028722763, + 0.01012666430324316, + 0.0008882181136868894, + -0.03662685677409172, + -0.013940184377133846, + 0.00019492879800964147, + 0.0035409636329859495, + -0.0030869427137076855, + -0.007558043114840984, + -0.0018918249988928437, + 0.0034307725727558136, + 0.018756462261080742, + -0.01163308322429657, + 0.0016895552398636937, + 0.013370579108595848 + ], + [ + -0.005524301901459694, + 0.000977616640739143, + -0.0042923144064843655, + -0.0044611855410039425, + 0.03307471424341202, + -0.004687427543103695, + -0.011328428983688354, + 0.006677189841866493, + 0.001559663680382073, + -0.01713346131145954, + 0.0063633667305111885, + 
0.005269331391900778, + -0.01711510866880417, + -0.00409148633480072, + -0.0010997614590451121, + -0.009250601753592491, + -0.017806652933359146, + 0.003262654412537813, + 0.002498725662007928, + 0.003155801212415099, + 0.00750375771895051, + -0.006839435547590256, + -3.0091141525190324e-05, + -0.00972696766257286, + -0.005752003286033869, + -0.008066670969128609, + 0.0008695494034327567, + -0.0028020453173667192, + -0.024329842999577522, + -0.010055238381028175, + -0.00648196367546916, + 0.00644444627687335, + -0.0021590455435216427, + 0.02172566018998623, + -0.005145265720784664, + 0.0022447656374424696, + 0.0009226002148352563, + -0.013772794045507908, + -0.00182228849735111, + -0.0007712747901678085, + 0.0011832761811092496, + 0.010212586261332035, + 0.003960642497986555, + 0.001467913738451898, + 0.0018004805315285921 + ], + [ + 0.03515428304672241, + 0.011563684791326523, + -5.127954136696644e-05, + -0.008626585826277733, + 0.02314571663737297, + -6.98980875313282e-05, + -0.00612989068031311, + 0.004387413617223501, + 0.001388968201354146, + 0.006792738102376461, + -0.012103176675736904, + 0.005686194635927677, + -0.005020753014832735, + -0.008832537569105625, + -0.001394168590195477, + -0.0007854663417674601, + 0.006541788578033447, + 0.0008951985510066152, + -0.003009835258126259, + 0.0010391295654699206, + -0.002785543678328395, + 0.0005243027117103338, + -0.00155424396507442, + 0.0019627693109214306, + 0.0042829508893191814, + -0.0028527246322482824, + -0.0003181783249601722, + 0.018253978341817856, + 0.018093591555953026, + 0.00162507442291826, + -0.011009054258465767, + -0.008197661489248276, + 0.0004319990111980587, + -0.0015664614038541913, + 0.020246168598532677, + -0.002907453803345561, + 0.00868506170809269, + -0.0005308574764057994, + -0.006167100742459297, + 0.009623652324080467, + 0.00046505016507580876, + 0.012614063918590546, + -0.007564898580312729, + -0.0036153290420770645, + -0.0028734186198562384 + ], + [ + -0.006848630495369434, + 0.014676680788397789, + 0.009769118390977383, + -0.012179943732917309, + 0.012238305993378162, + -0.00032428783015348017, + -0.0036104496102780104, + -0.005420226138085127, + 0.002557483036071062, + -0.017464030534029007, + -0.016143344342708588, + -0.0009277384378947318, + -0.009015277028083801, + 0.013943755067884922, + -0.00460736732929945, + 0.002973730443045497, + -0.0087607866153121, + 0.0005613718531094491, + 0.0036566469352692366, + -0.0045868949964642525, + 0.0047304825857281685, + 0.0024165157228708267, + -0.006300253327935934, + 0.0016411547549068928, + -0.001050203456543386, + -0.00394306518137455, + 0.005538031924515963, + -0.004923299886286259, + 0.020530205219984055, + -0.0038366327062249184, + 0.007140693254768848, + -0.010964092798531055, + 0.0011853117030113935, + -0.005705537740141153, + -0.008157758973538876, + -0.0002241577603854239, + -0.009419707581400871, + 0.0068698907271027565, + 0.023548549041152, + -0.008388974703848362, + -0.0130734508857131, + -0.010252144187688828, + 1.2216474942761124e-06, + -0.00706624798476696, + -0.013854717835783958 + ], + [ + 0.01693136803805828, + 0.0031372110825031996, + 0.0032617931719869375, + -0.0029004744719713926, + -0.0023515133652836084, + 0.0012261347146704793, + -0.0008195796981453896, + 0.0045450902543962, + -0.0002698961179703474, + -0.012286235578358173, + -0.004514013882726431, + 0.002021311316639185, + 0.02715420164167881, + 0.009062565863132477, + 0.002124266466125846, + -0.0019582815002650023, + -0.012648486532270908, + 0.00198007351718843, + 0.005776786711066961, + 
-0.002679277677088976, + 0.004615813959389925, + 0.0010468489490449429, + -0.009104210883378983, + 0.004296170547604561, + -0.0009241654188372195, + 0.0032640951685607433, + 0.0007544599939137697, + -0.011452611535787582, + 0.006633649580180645, + -0.009103978984057903, + 0.020047502592206, + 0.024611419066786766, + -0.002058004029095173, + 0.011610561981797218, + 0.008307352662086487, + 0.0008427408756688237, + -0.0020841893274337053, + 0.0008554468513466418, + -0.005969997029751539, + 0.0012433535885065794, + 0.002217241795733571, + 0.003036477603018284, + -0.006090730894356966, + 0.000806248455774039, + 0.0069640884175896645 + ], + [ + -0.011200237087905407, + 0.002682264195755124, + 0.005129658617079258, + 0.01287020742893219, + 0.015529340133070946, + -0.002236587693914771, + 0.0033685883972793818, + -0.01608942449092865, + 0.005459952168166637, + -0.007895588874816895, + -0.007723725866526365, + -0.007836567237973213, + 0.028264129534363747, + 0.0039421068504452705, + 0.0007943419041112065, + -0.008082963526248932, + -0.0012483496684581041, + -0.00024735109764151275, + -0.007795915473252535, + 0.0006657515186816454, + -0.004122520796954632, + -0.00032066713902167976, + 0.009191579185426235, + -0.002754528308287263, + 0.0012004548916593194, + 0.005048173945397139, + -0.0024772307369858027, + 0.019257336854934692, + -0.004682609811425209, + 0.0063582295551896095, + -0.0013501865323632956, + -0.009260028600692749, + 0.002344950335100293, + 0.0036942947190254927, + 0.0008335068705491722, + 0.0024531492963433266, + -0.0011307316599413753, + -0.0028964411467313766, + 0.0007962941308505833, + -0.0002912830968853086, + 0.0014758592005819082, + 0.0003035767294932157, + -0.0029652034863829613, + 0.0007014012662693858, + 0.002067896071821451 + ], + [ + 0.006195953115820885, + -0.004878598265349865, + 0.0003935272397939116, + -0.004443035461008549, + -0.007727185729891062, + 0.000876554346177727, + 0.009658520109951496, + 0.01595279574394226, + -0.003571760607883334, + -0.001960332738235593, + 0.005607483442872763, + -0.0023387304972857237, + -0.00044839971815235913, + 0.006612768862396479, + -0.00036678201286122203, + 0.0020738537423312664, + 0.001467038644477725, + 0.0005324701778590679, + -0.0030059574637562037, + -0.0018900817958638072, + -0.0018344158306717873, + -0.0016157120699062943, + 0.001982525922358036, + -0.0015301307430490851, + -0.0006320856045931578, + 0.0015300542581826448, + -0.0004552277678158134, + 0.011698273941874504, + -0.00291864899918437, + 0.005847232416272163, + 0.005462152883410454, + -0.00436872523277998, + 0.0020734984427690506, + 0.0019671269692480564, + 0.0017167485784739256, + 0.0006844321615062654, + -0.011944874189794064, + -0.018293635919690132, + 0.024431472644209862, + -0.008728659711778164, + 0.001507685985416174, + 0.0172902699559927, + -0.008215625770390034, + -0.002193963620811701, + 0.006942014209926128 + ], + [ + -0.006958430632948875, + 0.0022303597070276737, + -0.005184312351047993, + 0.012648834846913815, + 0.011840980499982834, + -0.001093623461201787, + 0.004257561638951302, + 0.0024148873053491116, + 6.128253517090343e-06, + 0.015434450469911098, + -0.009783624671399593, + 0.001900278264656663, + -0.004381220322102308, + -0.01729026436805725, + 0.0005190964438952506, + 0.014209604822099209, + -0.00821677315980196, + 0.0034260055981576443, + -0.0032596851233392954, + 0.00029968336457386613, + -0.0037785160820931196, + -0.0056799608282744884, + 0.00553898373618722, + -0.007639668881893158, + -0.00702810101211071, + -0.00034543455694802105, + 
-0.0021905112080276012, + 0.00044441065983846784, + 0.007309116888791323, + 0.0037157414481043816, + 0.020854750648140907, + 0.011359392665326595, + 0.003255574032664299, + -0.0026862621307373047, + -0.0005511222407221794, + -0.00021524043404497206, + -0.0009734127670526505, + 0.0015755231725052, + 0.006016546860337257, + -0.0024158258456736803, + 0.0008244782220572233, + 0.0018912493251264095, + 0.0010421037441119552, + 0.0010570120066404343, + 0.0015601996565237641 + ], + [ + 0.003647628240287304, + 0.00018447083130013198, + -0.002302320674061775, + 0.019668636843562126, + 0.005486125126481056, + -0.0024349757004529238, + 0.012231294997036457, + -0.00772100267931819, + 0.003329834435135126, + 0.0013343141181394458, + -0.001208956353366375, + 0.0020790330599993467, + 6.166790990391746e-05, + -0.004452846944332123, + 0.0005777273909188807, + -0.005199058447033167, + 0.0003697117790579796, + 0.00018579346942715347, + 0.0067620184272527695, + 3.727666990016587e-05, + 0.00733867147937417, + 0.003955514635890722, + -0.004073562566190958, + 0.004260346293449402, + 0.0031335647217929363, + 0.00019730959320440888, + 0.002632821211591363, + -0.013581051491200924, + 0.0005630888626910746, + -0.007590027526021004, + -0.016562901437282562, + 0.005802590399980545, + -0.005058777052909136, + -0.009642614983022213, + 0.007131681311875582, + -0.0044797747395932674, + -0.005954313091933727, + -0.005144465249031782, + 0.01214740239083767, + -0.0032915498595684767, + 0.00021614471916109324, + 0.002521343994885683, + -0.0029287212528288364, + -0.0007636473746970296, + 0.0029460457153618336 + ], + [ + -0.011385489255189896, + 0.0016915265005081892, + 0.001666159019805491, + -0.015864713117480278, + 0.00021571248362306505, + 0.0007149733137339354, + -0.008138676173985004, + 0.002941141603514552, + -0.0008013822371140122, + 0.009584633633494377, + -0.007182879839092493, + 0.0018052182858809829, + 0.008119347505271435, + -0.014607059769332409, + 0.0010591051541268826, + -0.01992860622704029, + 0.008581474423408508, + -0.00114027492236346, + 0.0008870703750289977, + -0.0003767914022319019, + -0.0006957599543966353, + 0.001548565924167633, + 0.002454499015584588, + 0.0003648319689091295, + 0.0022865652572363615, + -0.00047113915206864476, + 0.0003511659160722047, + -0.00813040230423212, + 0.0025104819796979427, + -0.0006113907438702881, + 0.00017049889720510691, + 0.0026401046197861433, + -0.0013247268507257104, + -0.00026928307488560677, + 0.0008339969790540636, + 0.0011175669496878982, + -0.0039907181635499, + -0.003607675665989518, + 0.006059848703444004, + -0.0009034615359269083, + 0.0009394243825227022, + 0.0025823896285146475, + -0.0020878673531115055, + -0.00018126489885617048, + 0.00236976589076221 + ], + [ + 0.004028908908367157, + -0.0017049856251105666, + -5.370379949454218e-05, + 0.009394863620400429, + -0.0027828046586364508, + -0.00041421252535656095, + 0.004086229484528303, + -0.0026524735148996115, + 0.0008281071786768734, + -0.012737114913761616, + -0.0015140045434236526, + -0.0029696577694267035, + -0.013778825290501118, + -0.005635555367916822, + -0.0025300418492406607, + -0.017714232206344604, + 0.01195415761321783, + -0.0011275607394054532, + 0.00019706611055880785, + 0.00029546022415161133, + 0.0010664897272363305, + 0.001903712167404592, + -0.0012171977432444692, + 0.001963118091225624, + 0.0015075091505423188, + 0.0019480364862829447, + 3.5159191611455753e-05, + 0.007157324813306332, + 0.0012632799334824085, + 0.0013232915662229061, + 0.014227595180273056, + 0.005578646436333656, + 
0.002583068795502186, + 0.0010458779288455844, + -0.0015559825114905834, + 0.0011084281140938401, + 0.0011413419852033257, + 0.0017934181960299611, + -0.001995643600821495, + -0.0004406919761095196, + -0.0009208093979395926, + -0.0016424627974629402, + -0.0008877772488631308, + -0.0008989704656414688, + -0.0007891377899795771 + ] + ] +} \ No newline at end of file diff --git a/data_utils/lower_body.py b/data_utils/lower_body.py new file mode 100644 index 0000000000000000000000000000000000000000..501a83c7c83bbcd97c6dee09b809b1c75be45213 --- /dev/null +++ b/data_utils/lower_body.py @@ -0,0 +1,143 @@ +import numpy as np +import torch + +lower_pose = torch.tensor( + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 3.0747, -0.0158, -0.0152, -1.1826512813568115, 0.23866955935955048, + 0.15146760642528534, -1.2604516744613647, -0.3160211145877838, + -0.1603458970785141, 1.1654603481292725, 0.0, 0.0, 1.2521806955337524, 0.041598282754421234, -0.06312154978513718, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]) +lower_pose_stand = torch.tensor([ + 8.9759e-04, 7.1074e-04, -5.9163e-06, 8.9759e-04, 7.1074e-04, -5.9163e-06, + 3.0747, -0.0158, -0.0152, + -3.6665e-01, -8.8455e-03, 1.6113e-01, -3.6665e-01, -8.8455e-03, 1.6113e-01, + -3.9716e-01, -4.0229e-02, -1.2637e-01, + 7.9163e-01, 6.8519e-02, -1.5091e-01, 7.9163e-01, 6.8519e-02, -1.5091e-01, + 7.8632e-01, -4.3810e-02, 1.4375e-02, + -1.0675e-01, 1.2635e-01, 1.6711e-02, -1.0675e-01, 1.2635e-01, 1.6711e-02, ]) +# lower_pose_stand = torch.tensor( +# [6.4919e-02, 3.3018e-02, 1.7485e-02, 8.9759e-04, 7.1074e-04, -5.9163e-06, +# 3.0747, -0.0158, -0.0152, +# -3.3633e+00, -9.3915e-02, 3.0996e-01, -3.6665e-01, -8.8455e-03, 1.6113e-01, +# 1.1654603481292725, 0.0, 0.0, +# 4.4167e-01, 6.7183e-03, -3.6379e-03, 7.9163e-01, 6.8519e-02, -1.5091e-01, +# 0.0, 0.0, 0.0, +# 2.2910e-02, -2.4797e-02, -5.5657e-03, -1.0675e-01, 1.2635e-01, 1.6711e-02,]) +lower_body = [0, 1, 3, 4, 6, 7, 9, 10] +count_part = [6, 9, 12, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, + 29, 30, 31, 32, 33, 34, 35, 36, 37, + 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54] +fix_index = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, + 29, + 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, + 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, + 65, 66, 67, 68, 69, 70, 71, 72, 73, 74] +all_index = np.ones(275) +all_index[fix_index] = 0 +c_index = [] +i = 0 +for num in all_index: + if num == 1: + c_index.append(i) + i = i + 1 +c_index = np.asarray(c_index) + +fix_index_3d = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, + 21, 22, 23, 24, 25, 26, + 30, 31, 32, 33, 34, 35, + 45, 46, 47, 48, 49, 50] +all_index_3d = np.ones(165) +all_index_3d[fix_index_3d] = 0 +c_index_3d = [] +i = 0 +for num in all_index_3d: + if num == 1: + c_index_3d.append(i) + i = i + 1 +c_index_3d = np.asarray(c_index_3d) + +c_index_6d = [] +i = 0 +for num in all_index_3d: + if num == 1: + c_index_6d.append(2*i) + c_index_6d.append(2 * i + 1) + i = i + 1 +c_index_6d = np.asarray(c_index_6d) + + +def part2full(input, stand=False): + if stand: + # lp = lower_pose_stand.unsqueeze(dim=0).repeat(input.shape[0], 1).to(input.device) + lp = torch.zeros_like(lower_pose) + lp[6:9] = torch.tensor([3.0747, -0.0158, -0.0152]) + lp = lp.unsqueeze(dim=0).repeat(input.shape[0], 1).to(input.device) + else: + lp = lower_pose.unsqueeze(dim=0).repeat(input.shape[0], 1).to(input.device) + + input = torch.cat([input[:, :3], + lp[:, :15], + input[:, 3:6], + lp[:, 15:21], + 
input[:, 6:9], + lp[:, 21:27], + input[:, 9:12], + lp[:, 27:], + input[:, 12:]] + , dim=1) + return input + + +def pred2poses(input, gt): + input = torch.cat([input[:, :3], + gt[0:1, 3:18].repeat(input.shape[0], 1), + input[:, 3:6], + gt[0:1, 21:27].repeat(input.shape[0], 1), + input[:, 6:9], + gt[0:1, 30:36].repeat(input.shape[0], 1), + input[:, 9:12], + gt[0:1, 39:45].repeat(input.shape[0], 1), + input[:, 12:]] + , dim=1) + return input + + +def poses2poses(input, gt): + input = torch.cat([input[:, :3], + gt[0:1, 3:18].repeat(input.shape[0], 1), + input[:, 18:21], + gt[0:1, 21:27].repeat(input.shape[0], 1), + input[:, 27:30], + gt[0:1, 30:36].repeat(input.shape[0], 1), + input[:, 36:39], + gt[0:1, 39:45].repeat(input.shape[0], 1), + input[:, 45:]] + , dim=1) + return input + +def poses2pred(input, stand=False): + if stand: + lp = lower_pose_stand.unsqueeze(dim=0).repeat(input.shape[0], 1).to(input.device) + # lp = torch.zeros_like(lower_pose).unsqueeze(dim=0).repeat(input.shape[0], 1).to(input.device) + else: + lp = lower_pose.unsqueeze(dim=0).repeat(input.shape[0], 1).to(input.device) + input = torch.cat([input[:, :3], + lp[:, :15], + input[:, 18:21], + lp[:, 15:21], + input[:, 27:30], + lp[:, 21:27], + input[:, 36:39], + lp[:, 27:], + input[:, 45:]] + , dim=1) + return input + + +rearrange = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21]\ + # ,22, 23, 24, 25, 40, 26, 41, + # 27, 42, 28, 43, 29, 44, 30, 45, 31, 46, 32, 47, 33, 48, 34, 49, 35, 50, 36, 51, 37, 52, 38, 53, 39, 54, 55, + # 57, 56, 59, 58, 60, 63, 61, 64, 62, 65, 66, 71, 67, 72, 68, 73, 69, 74, 70, 75] + +symmetry = [0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1]#, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + # 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + # 1, 1, 1, 1, 1, 1] diff --git a/data_utils/mesh_dataset.py b/data_utils/mesh_dataset.py new file mode 100644 index 0000000000000000000000000000000000000000..9d19c1e512e3e1aaed645791acf88345af4c9bb9 --- /dev/null +++ b/data_utils/mesh_dataset.py @@ -0,0 +1,348 @@ +import pickle +import sys +import os + +sys.path.append(os.getcwd()) + +import json +from glob import glob +from data_utils.utils import * +import torch.utils.data as data +from data_utils.consts import speaker_id +from data_utils.lower_body import count_part +import random +from data_utils.rotation_conversion import axis_angle_to_matrix, matrix_to_rotation_6d + +with open('data_utils/hand_component.json') as file_obj: + comp = json.load(file_obj) + left_hand_c = np.asarray(comp['left']) + right_hand_c = np.asarray(comp['right']) + + +def to3d(data): + left_hand_pose = np.einsum('bi,ij->bj', data[:, 75:87], left_hand_c[:12, :]) + right_hand_pose = np.einsum('bi,ij->bj', data[:, 87:99], right_hand_c[:12, :]) + data = np.concatenate((data[:, :75], left_hand_pose, right_hand_pose), axis=-1) + return data + + +class SmplxDataset(): + ''' + creat a dataset for every segment and concat. 
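+    Each item produced by the inner __Worker__ dataset is a dict holding
+    (roughly) 'poses', 'expression', 'aud_feat', 'speaker' and 'betas';
+    hand poses stored as 12 PCA coefficients are expanded to full 45-dim
+    axis-angle hands by to3d() above. Illustrative use (a sketch only,
+    real arguments come from the json config):
+        dataset = SmplxDataset(data_root, speaker, motion_fn, audio_fn,
+                               audio_sr=16000, fps=30, config=config,
+                               load_all=True, expression=True)
+        dataset.get_dataset(split='train')
+        loader = torch.utils.data.DataLoader(dataset.all_dataset, batch_size=1)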
+ ''' + + def __init__(self, + data_root, + speaker, + motion_fn, + audio_fn, + audio_sr, + fps, + feat_method='mel_spec', + audio_feat_dim=64, + audio_feat_win_size=None, + + train=True, + load_all=False, + split_trans_zero=False, + limbscaling=False, + num_frames=25, + num_pre_frames=25, + num_generate_length=25, + context_info=False, + convert_to_6d=False, + expression=False, + config=None, + am=None, + am_sr=None, + whole_video=False + ): + + self.data_root = data_root + self.speaker = speaker + + self.feat_method = feat_method + self.audio_fn = audio_fn + self.audio_sr = audio_sr + self.fps = fps + self.audio_feat_dim = audio_feat_dim + self.audio_feat_win_size = audio_feat_win_size + self.context_info = context_info # for aud feat + self.convert_to_6d = convert_to_6d + self.expression = expression + + self.train = train + self.load_all = load_all + self.split_trans_zero = split_trans_zero + self.limbscaling = limbscaling + self.num_frames = num_frames + self.num_pre_frames = num_pre_frames + self.num_generate_length = num_generate_length + # print('num_generate_length ', self.num_generate_length) + + self.config = config + self.am_sr = am_sr + self.whole_video = whole_video + load_mode = self.config.dataset_load_mode + + if load_mode == 'pickle': + raise NotImplementedError + + elif load_mode == 'csv': + import pickle + with open(data_root, 'rb') as f: + u = pickle._Unpickler(f) + data = u.load() + self.data = data[0] + if self.load_all: + self._load_npz_all() + + elif load_mode == 'json': + self.annotations = glob(data_root + '/*pkl') + if len(self.annotations) == 0: + raise FileNotFoundError(data_root + ' are empty') + self.annotations = sorted(self.annotations) + self.img_name_list = self.annotations + + if self.load_all: + self._load_them_all(am, am_sr, motion_fn) + + def _load_npz_all(self): + self.loaded_data = {} + self.complete_data = [] + data = self.data + shape = data['body_pose_axis'].shape[0] + self.betas = data['betas'] + self.img_name_list = [] + for index in range(shape): + img_name = f'{index:6d}' + self.img_name_list.append(img_name) + + jaw_pose = data['jaw_pose'][index] + leye_pose = data['leye_pose'][index] + reye_pose = data['reye_pose'][index] + global_orient = data['global_orient'][index] + body_pose = data['body_pose_axis'][index] + left_hand_pose = data['left_hand_pose'][index] + right_hand_pose = data['right_hand_pose'][index] + + full_body = np.concatenate( + (jaw_pose, leye_pose, reye_pose, global_orient, body_pose, left_hand_pose, right_hand_pose)) + assert full_body.shape[0] == 99 + if self.convert_to_6d: + full_body = to3d(full_body) + full_body = torch.from_numpy(full_body) + full_body = matrix_to_rotation_6d(axis_angle_to_matrix(full_body)) + full_body = np.asarray(full_body) + if self.expression: + expression = data['expression'][index] + full_body = np.concatenate((full_body, expression)) + # full_body = np.concatenate((full_body, non_zero)) + else: + full_body = to3d(full_body) + if self.expression: + expression = data['expression'][index] + full_body = np.concatenate((full_body, expression)) + + self.loaded_data[img_name] = full_body.reshape(-1) + self.complete_data.append(full_body.reshape(-1)) + + self.complete_data = np.array(self.complete_data) + + if self.audio_feat_win_size is not None: + self.audio_feat = get_mfcc_old(self.audio_fn).transpose(1, 0) + # print(self.audio_feat.shape) + else: + if self.feat_method == 'mel_spec': + self.audio_feat = get_melspec(self.audio_fn, fps=self.fps, sr=self.audio_sr, n_mels=self.audio_feat_dim) + elif 
self.feat_method == 'mfcc': + self.audio_feat = get_mfcc(self.audio_fn, + smlpx=True, + sr=self.audio_sr, + n_mfcc=self.audio_feat_dim, + win_size=self.audio_feat_win_size + ) + + def _load_them_all(self, am, am_sr, motion_fn): + self.loaded_data = {} + self.complete_data = [] + f = open(motion_fn, 'rb+') + data = pickle.load(f) + + self.betas = np.array(data['betas']) + + jaw_pose = np.array(data['jaw_pose']) + leye_pose = np.array(data['leye_pose']) + reye_pose = np.array(data['reye_pose']) + global_orient = np.array(data['global_orient']).squeeze() + body_pose = np.array(data['body_pose_axis']) + left_hand_pose = np.array(data['left_hand_pose']) + right_hand_pose = np.array(data['right_hand_pose']) + + full_body = np.concatenate( + (jaw_pose, leye_pose, reye_pose, global_orient, body_pose, left_hand_pose, right_hand_pose), axis=1) + assert full_body.shape[1] == 99 + + + if self.convert_to_6d: + full_body = to3d(full_body) + full_body = torch.from_numpy(full_body) + full_body = matrix_to_rotation_6d(axis_angle_to_matrix(full_body.reshape(-1, 55, 3))).reshape(-1, 330) + full_body = np.asarray(full_body) + if self.expression: + expression = np.array(data['expression']) + full_body = np.concatenate((full_body, expression), axis=1) + + else: + full_body = to3d(full_body) + expression = np.array(data['expression']) + full_body = np.concatenate((full_body, expression), axis=1) + + self.complete_data = full_body + self.complete_data = np.array(self.complete_data) + + if self.audio_feat_win_size is not None: + self.audio_feat = get_mfcc_old(self.audio_fn).transpose(1, 0) + else: + # if self.feat_method == 'mel_spec': + # self.audio_feat = get_melspec(self.audio_fn, fps=self.fps, sr=self.audio_sr, n_mels=self.audio_feat_dim) + # elif self.feat_method == 'mfcc': + self.audio_feat = get_mfcc_ta(self.audio_fn, + smlpx=True, + fps=30, + sr=self.audio_sr, + n_mfcc=self.audio_feat_dim, + win_size=self.audio_feat_win_size, + type=self.feat_method, + am=am, + am_sr=am_sr, + encoder_choice=self.config.Model.encoder_choice, + ) + # with open(audio_file, 'w', encoding='utf-8') as file: + # file.write(json.dumps(self.audio_feat.__array__().tolist(), indent=0, ensure_ascii=False)) + + def get_dataset(self, normalization=False, normalize_stats=None, split='train'): + + class __Worker__(data.Dataset): + def __init__(child, index_list, normalization, normalize_stats, split='train') -> None: + super().__init__() + child.index_list = index_list + child.normalization = normalization + child.normalize_stats = normalize_stats + child.split = split + + def __getitem__(child, index): + num_generate_length = self.num_generate_length + num_pre_frames = self.num_pre_frames + seq_len = num_generate_length + num_pre_frames + # print(num_generate_length) + + index = child.index_list[index] + index_new = index + random.randrange(0, 5, 3) + if index_new + seq_len > self.complete_data.shape[0]: + index_new = index + index = index_new + + if child.split in ['val', 'pre', 'test'] or self.whole_video: + index = 0 + seq_len = self.complete_data.shape[0] + seq_data = [] + assert index + seq_len <= self.complete_data.shape[0] + # print(seq_len) + seq_data = self.complete_data[index:(index + seq_len), :] + seq_data = np.array(seq_data) + + ''' + audio feature, + ''' + if not self.context_info: + if not self.whole_video: + audio_feat = self.audio_feat[index:index + seq_len, ...] 
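+                    # The audio features are sliced with the same frame window as
+                    # the poses, i.e. they are assumed to be (roughly) frame-aligned
+                    # with the video; if the feature track is shorter it is
+                    # reflect-padded just below.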
+ if audio_feat.shape[0] < seq_len: + audio_feat = np.pad(audio_feat, [[0, seq_len - audio_feat.shape[0]], [0, 0]], + mode='reflect') + + assert audio_feat.shape[0] == seq_len and audio_feat.shape[1] == self.audio_feat_dim + else: + audio_feat = self.audio_feat + + else: # including feature and history + if self.audio_feat_win_size is None: + audio_feat = self.audio_feat[index:index + seq_len + num_pre_frames, ...] + if audio_feat.shape[0] < seq_len + num_pre_frames: + audio_feat = np.pad(audio_feat, + [[0, seq_len + self.num_frames - audio_feat.shape[0]], [0, 0]], + mode='constant') + + assert audio_feat.shape[0] == self.num_frames + seq_len and audio_feat.shape[ + 1] == self.audio_feat_dim + + if child.normalization: + data_mean = child.normalize_stats['mean'].reshape(1, -1) + data_std = child.normalize_stats['std'].reshape(1, -1) + seq_data[:, :330] = (seq_data[:, :330] - data_mean) / data_std + if child.split in['train', 'test']: + if self.convert_to_6d: + if self.expression: + data_sample = { + 'poses': seq_data[:, :330].astype(np.float).transpose(1, 0), + 'expression': seq_data[:, 330:].astype(np.float).transpose(1, 0), + # 'nzero': seq_data[:, 375:].astype(np.float).transpose(1, 0), + 'aud_feat': audio_feat.astype(np.float).transpose(1, 0), + 'speaker': speaker_id[self.speaker], + 'betas': self.betas, + 'aud_file': self.audio_fn, + } + else: + data_sample = { + 'poses': seq_data[:, :330].astype(np.float).transpose(1, 0), + 'nzero': seq_data[:, 330:].astype(np.float).transpose(1, 0), + 'aud_feat': audio_feat.astype(np.float).transpose(1, 0), + 'speaker': speaker_id[self.speaker], + 'betas': self.betas + } + else: + if self.expression: + data_sample = { + 'poses': seq_data[:, :165].astype(np.float).transpose(1, 0), + 'expression': seq_data[:, 165:].astype(np.float).transpose(1, 0), + 'aud_feat': audio_feat.astype(np.float).transpose(1, 0), + # 'wv2_feat': wv2_feat.astype(np.float).transpose(1, 0), + 'speaker': speaker_id[self.speaker], + 'aud_file': self.audio_fn, + 'betas': self.betas + } + else: + data_sample = { + 'poses': seq_data.astype(np.float).transpose(1, 0), + 'aud_feat': audio_feat.astype(np.float).transpose(1, 0), + 'speaker': speaker_id[self.speaker], + 'betas': self.betas + } + return data_sample + else: + data_sample = { + 'poses': seq_data[:, :330].astype(np.float).transpose(1, 0), + 'expression': seq_data[:, 330:].astype(np.float).transpose(1, 0), + # 'nzero': seq_data[:, 325:].astype(np.float).transpose(1, 0), + 'aud_feat': audio_feat.astype(np.float).transpose(1, 0), + 'aud_file': self.audio_fn, + 'speaker': speaker_id[self.speaker], + 'betas': self.betas + } + return data_sample + def __len__(child): + return len(child.index_list) + + if split == 'train': + index_list = list( + range(0, min(self.complete_data.shape[0], self.audio_feat.shape[0]) - self.num_generate_length - self.num_pre_frames, + 6)) + elif split in ['val', 'test']: + index_list = list([0]) + if self.whole_video: + index_list = list([0]) + self.all_dataset = __Worker__(index_list, normalization, normalize_stats, split) + + def __len__(self): + return len(self.img_name_list) diff --git a/data_utils/rotation_conversion.py b/data_utils/rotation_conversion.py new file mode 100644 index 0000000000000000000000000000000000000000..770c3bf36f05fcaf89cbb03e17035357f3c0a4df --- /dev/null +++ b/data_utils/rotation_conversion.py @@ -0,0 +1,551 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All rights reserved. 
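+# These conversion helpers follow PyTorch3D's transforms module (see the
+# licence note below). In this repo the pose data is typically moved between
+# axis-angle, rotation-matrix and 6D form; a minimal sketch of the round trip
+# used in data_utils/mesh_dataset.py:
+#     d6 = matrix_to_rotation_6d(axis_angle_to_matrix(full_body.reshape(-1, 55, 3)))
+#     aa = matrix_to_axis_angle(rotation_6d_to_matrix(d6))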
+# Check PYTORCH3D_LICENCE before use + +import functools +from typing import Optional + +import torch +import torch.nn.functional as F + + +""" +The transformation matrices returned from the functions in this file assume +the points on which the transformation will be applied are column vectors. +i.e. the R matrix is structured as + + R = [ + [Rxx, Rxy, Rxz], + [Ryx, Ryy, Ryz], + [Rzx, Rzy, Rzz], + ] # (3, 3) + +This matrix can be applied to column vectors by post multiplication +by the points e.g. + + points = [[0], [1], [2]] # (3 x 1) xyz coordinates of a point + transformed_points = R * points + +To apply the same matrix to points which are row vectors, the R matrix +can be transposed and pre multiplied by the points: + +e.g. + points = [[0, 1, 2]] # (1 x 3) xyz coordinates of a point + transformed_points = points * R.transpose(1, 0) +""" + + +def quaternion_to_matrix(quaternions): + """ + Convert rotations given as quaternions to rotation matrices. + + Args: + quaternions: quaternions with real part first, + as tensor of shape (..., 4). + + Returns: + Rotation matrices as tensor of shape (..., 3, 3). + """ + r, i, j, k = torch.unbind(quaternions, -1) + two_s = 2.0 / (quaternions * quaternions).sum(-1) + + o = torch.stack( + ( + 1 - two_s * (j * j + k * k), + two_s * (i * j - k * r), + two_s * (i * k + j * r), + two_s * (i * j + k * r), + 1 - two_s * (i * i + k * k), + two_s * (j * k - i * r), + two_s * (i * k - j * r), + two_s * (j * k + i * r), + 1 - two_s * (i * i + j * j), + ), + -1, + ) + return o.reshape(quaternions.shape[:-1] + (3, 3)) + + +def _copysign(a, b): + """ + Return a tensor where each element has the absolute value taken from the, + corresponding element of a, with sign taken from the corresponding + element of b. This is like the standard copysign floating-point operation, + but is not careful about negative 0 and NaN. + + Args: + a: source tensor. + b: tensor whose signs will be used, of the same shape as a. + + Returns: + Tensor of the same shape as a with the signs of b. + """ + signs_differ = (a < 0) != (b < 0) + return torch.where(signs_differ, -a, a) + + +def _sqrt_positive_part(x): + """ + Returns torch.sqrt(torch.max(0, x)) + but with a zero subgradient where x is 0. + """ + ret = torch.zeros_like(x) + positive_mask = x > 0 + ret[positive_mask] = torch.sqrt(x[positive_mask]) + return ret + + +def matrix_to_quaternion(matrix): + """ + Convert rotations given as rotation matrices to quaternions. + + Args: + matrix: Rotation matrices as tensor of shape (..., 3, 3). + + Returns: + quaternions with real part first, as tensor of shape (..., 4). + """ + if matrix.size(-1) != 3 or matrix.size(-2) != 3: + raise ValueError(f"Invalid rotation matrix shape f{matrix.shape}.") + m00 = matrix[..., 0, 0] + m11 = matrix[..., 1, 1] + m22 = matrix[..., 2, 2] + o0 = 0.5 * _sqrt_positive_part(1 + m00 + m11 + m22) + x = 0.5 * _sqrt_positive_part(1 + m00 - m11 - m22) + y = 0.5 * _sqrt_positive_part(1 - m00 + m11 - m22) + z = 0.5 * _sqrt_positive_part(1 - m00 - m11 + m22) + o1 = _copysign(x, matrix[..., 2, 1] - matrix[..., 1, 2]) + o2 = _copysign(y, matrix[..., 0, 2] - matrix[..., 2, 0]) + o3 = _copysign(z, matrix[..., 1, 0] - matrix[..., 0, 1]) + return torch.stack((o0, o1, o2, o3), -1) + + +def _axis_angle_rotation(axis: str, angle): + """ + Return the rotation matrices for one of the rotations about an axis + of which Euler angles describe, for each value of the angle given. + + Args: + axis: Axis label "X" or "Y or "Z". 
+ angle: any shape tensor of Euler angles in radians + + Returns: + Rotation matrices as tensor of shape (..., 3, 3). + """ + + cos = torch.cos(angle) + sin = torch.sin(angle) + one = torch.ones_like(angle) + zero = torch.zeros_like(angle) + + if axis == "X": + R_flat = (one, zero, zero, zero, cos, -sin, zero, sin, cos) + if axis == "Y": + R_flat = (cos, zero, sin, zero, one, zero, -sin, zero, cos) + if axis == "Z": + R_flat = (cos, -sin, zero, sin, cos, zero, zero, zero, one) + + return torch.stack(R_flat, -1).reshape(angle.shape + (3, 3)) + + +def euler_angles_to_matrix(euler_angles, convention: str): + """ + Convert rotations given as Euler angles in radians to rotation matrices. + + Args: + euler_angles: Euler angles in radians as tensor of shape (..., 3). + convention: Convention string of three uppercase letters from + {"X", "Y", and "Z"}. + + Returns: + Rotation matrices as tensor of shape (..., 3, 3). + """ + if euler_angles.dim() == 0 or euler_angles.shape[-1] != 3: + raise ValueError("Invalid input euler angles.") + if len(convention) != 3: + raise ValueError("Convention must have 3 letters.") + if convention[1] in (convention[0], convention[2]): + raise ValueError(f"Invalid convention {convention}.") + for letter in convention: + if letter not in ("X", "Y", "Z"): + raise ValueError(f"Invalid letter {letter} in convention string.") + matrices = map(_axis_angle_rotation, convention, torch.unbind(euler_angles, -1)) + return functools.reduce(torch.matmul, matrices) + + +def _angle_from_tan( + axis: str, other_axis: str, data, horizontal: bool, tait_bryan: bool +): + """ + Extract the first or third Euler angle from the two members of + the matrix which are positive constant times its sine and cosine. + + Args: + axis: Axis label "X" or "Y or "Z" for the angle we are finding. + other_axis: Axis label "X" or "Y or "Z" for the middle axis in the + convention. + data: Rotation matrices as tensor of shape (..., 3, 3). + horizontal: Whether we are looking for the angle for the third axis, + which means the relevant entries are in the same row of the + rotation matrix. If not, they are in the same column. + tait_bryan: Whether the first and third axes in the convention differ. + + Returns: + Euler Angles in radians for each matrix in data as a tensor + of shape (...). + """ + + i1, i2 = {"X": (2, 1), "Y": (0, 2), "Z": (1, 0)}[axis] + if horizontal: + i2, i1 = i1, i2 + even = (axis + other_axis) in ["XY", "YZ", "ZX"] + if horizontal == even: + return torch.atan2(data[..., i1], data[..., i2]) + if tait_bryan: + return torch.atan2(-data[..., i2], data[..., i1]) + return torch.atan2(data[..., i2], -data[..., i1]) + + +def _index_from_letter(letter: str): + if letter == "X": + return 0 + if letter == "Y": + return 1 + if letter == "Z": + return 2 + + +def matrix_to_euler_angles(matrix, convention: str): + """ + Convert rotations given as rotation matrices to Euler angles in radians. + + Args: + matrix: Rotation matrices as tensor of shape (..., 3, 3). + convention: Convention string of three uppercase letters. + + Returns: + Euler angles in radians as tensor of shape (..., 3). 
+ """ + if len(convention) != 3: + raise ValueError("Convention must have 3 letters.") + if convention[1] in (convention[0], convention[2]): + raise ValueError(f"Invalid convention {convention}.") + for letter in convention: + if letter not in ("X", "Y", "Z"): + raise ValueError(f"Invalid letter {letter} in convention string.") + if matrix.size(-1) != 3 or matrix.size(-2) != 3: + raise ValueError(f"Invalid rotation matrix shape f{matrix.shape}.") + i0 = _index_from_letter(convention[0]) + i2 = _index_from_letter(convention[2]) + tait_bryan = i0 != i2 + if tait_bryan: + central_angle = torch.asin( + matrix[..., i0, i2] * (-1.0 if i0 - i2 in [-1, 2] else 1.0) + ) + else: + central_angle = torch.acos(matrix[..., i0, i0]) + + o = ( + _angle_from_tan( + convention[0], convention[1], matrix[..., i2], False, tait_bryan + ), + central_angle, + _angle_from_tan( + convention[2], convention[1], matrix[..., i0, :], True, tait_bryan + ), + ) + return torch.stack(o, -1) + + +def random_quaternions( + n: int, dtype: Optional[torch.dtype] = None, device=None, requires_grad=False +): + """ + Generate random quaternions representing rotations, + i.e. versors with nonnegative real part. + + Args: + n: Number of quaternions in a batch to return. + dtype: Type to return. + device: Desired device of returned tensor. Default: + uses the current device for the default tensor type. + requires_grad: Whether the resulting tensor should have the gradient + flag set. + + Returns: + Quaternions as tensor of shape (N, 4). + """ + o = torch.randn((n, 4), dtype=dtype, device=device, requires_grad=requires_grad) + s = (o * o).sum(1) + o = o / _copysign(torch.sqrt(s), o[:, 0])[:, None] + return o + + +def random_rotations( + n: int, dtype: Optional[torch.dtype] = None, device=None, requires_grad=False +): + """ + Generate random rotations as 3x3 rotation matrices. + + Args: + n: Number of rotation matrices in a batch to return. + dtype: Type to return. + device: Device of returned tensor. Default: if None, + uses the current device for the default tensor type. + requires_grad: Whether the resulting tensor should have the gradient + flag set. + + Returns: + Rotation matrices as tensor of shape (n, 3, 3). + """ + quaternions = random_quaternions( + n, dtype=dtype, device=device, requires_grad=requires_grad + ) + return quaternion_to_matrix(quaternions) + + +def random_rotation( + dtype: Optional[torch.dtype] = None, device=None, requires_grad=False +): + """ + Generate a single random 3x3 rotation matrix. + + Args: + dtype: Type to return + device: Device of returned tensor. Default: if None, + uses the current device for the default tensor type + requires_grad: Whether the resulting tensor should have the gradient + flag set + + Returns: + Rotation matrix as tensor of shape (3, 3). + """ + return random_rotations(1, dtype, device, requires_grad)[0] + + +def standardize_quaternion(quaternions): + """ + Convert a unit quaternion to a standard form: one in which the real + part is non negative. + + Args: + quaternions: Quaternions with real part first, + as tensor of shape (..., 4). + + Returns: + Standardized quaternions as tensor of shape (..., 4). + """ + return torch.where(quaternions[..., 0:1] < 0, -quaternions, quaternions) + + +def quaternion_raw_multiply(a, b): + """ + Multiply two quaternions. + Usual torch rules for broadcasting apply. + + Args: + a: Quaternions as tensor of shape (..., 4), real part first. + b: Quaternions as tensor of shape (..., 4), real part first. 
+ + Returns: + The product of a and b, a tensor of quaternions shape (..., 4). + """ + aw, ax, ay, az = torch.unbind(a, -1) + bw, bx, by, bz = torch.unbind(b, -1) + ow = aw * bw - ax * bx - ay * by - az * bz + ox = aw * bx + ax * bw + ay * bz - az * by + oy = aw * by - ax * bz + ay * bw + az * bx + oz = aw * bz + ax * by - ay * bx + az * bw + return torch.stack((ow, ox, oy, oz), -1) + + +def quaternion_multiply(a, b): + """ + Multiply two quaternions representing rotations, returning the quaternion + representing their composition, i.e. the versor with nonnegative real part. + Usual torch rules for broadcasting apply. + + Args: + a: Quaternions as tensor of shape (..., 4), real part first. + b: Quaternions as tensor of shape (..., 4), real part first. + + Returns: + The product of a and b, a tensor of quaternions of shape (..., 4). + """ + ab = quaternion_raw_multiply(a, b) + return standardize_quaternion(ab) + + +def quaternion_invert(quaternion): + """ + Given a quaternion representing rotation, get the quaternion representing + its inverse. + + Args: + quaternion: Quaternions as tensor of shape (..., 4), with real part + first, which must be versors (unit quaternions). + + Returns: + The inverse, a tensor of quaternions of shape (..., 4). + """ + + return quaternion * quaternion.new_tensor([1, -1, -1, -1]) + + +def quaternion_apply(quaternion, point): + """ + Apply the rotation given by a quaternion to a 3D point. + Usual torch rules for broadcasting apply. + + Args: + quaternion: Tensor of quaternions, real part first, of shape (..., 4). + point: Tensor of 3D points of shape (..., 3). + + Returns: + Tensor of rotated points of shape (..., 3). + """ + if point.size(-1) != 3: + raise ValueError(f"Points are not in 3D, f{point.shape}.") + real_parts = point.new_zeros(point.shape[:-1] + (1,)) + point_as_quaternion = torch.cat((real_parts, point), -1) + out = quaternion_raw_multiply( + quaternion_raw_multiply(quaternion, point_as_quaternion), + quaternion_invert(quaternion), + ) + return out[..., 1:] + + +def axis_angle_to_matrix(axis_angle): + """ + Convert rotations given as axis/angle to rotation matrices. + + Args: + axis_angle: Rotations given as a vector in axis angle form, + as a tensor of shape (..., 3), where the magnitude is + the angle turned anticlockwise in radians around the + vector's direction. + + Returns: + Rotation matrices as tensor of shape (..., 3, 3). + """ + return quaternion_to_matrix(axis_angle_to_quaternion(axis_angle)) + + +def matrix_to_axis_angle(matrix): + """ + Convert rotations given as rotation matrices to axis/angle. + + Args: + matrix: Rotation matrices as tensor of shape (..., 3, 3). + + Returns: + Rotations given as a vector in axis angle form, as a tensor + of shape (..., 3), where the magnitude is the angle + turned anticlockwise in radians around the vector's + direction. + """ + return quaternion_to_axis_angle(matrix_to_quaternion(matrix)) + + +def axis_angle_to_quaternion(axis_angle): + """ + Convert rotations given as axis/angle to quaternions. + + Args: + axis_angle: Rotations given as a vector in axis angle form, + as a tensor of shape (..., 3), where the magnitude is + the angle turned anticlockwise in radians around the + vector's direction. + + Returns: + quaternions with real part first, as tensor of shape (..., 4). 
+ """ + angles = torch.norm(axis_angle, p=2, dim=-1, keepdim=True) + half_angles = 0.5 * angles + eps = 1e-6 + small_angles = angles.abs() < eps + sin_half_angles_over_angles = torch.empty_like(angles) + sin_half_angles_over_angles[~small_angles] = ( + torch.sin(half_angles[~small_angles]) / angles[~small_angles] + ) + # for x small, sin(x/2) is about x/2 - (x/2)^3/6 + # so sin(x/2)/x is about 1/2 - (x*x)/48 + sin_half_angles_over_angles[small_angles] = ( + 0.5 - (angles[small_angles] * angles[small_angles]) / 48 + ) + quaternions = torch.cat( + [torch.cos(half_angles), axis_angle * sin_half_angles_over_angles], dim=-1 + ) + return quaternions + + +def quaternion_to_axis_angle(quaternions): + """ + Convert rotations given as quaternions to axis/angle. + + Args: + quaternions: quaternions with real part first, + as tensor of shape (..., 4). + + Returns: + Rotations given as a vector in axis angle form, as a tensor + of shape (..., 3), where the magnitude is the angle + turned anticlockwise in radians around the vector's + direction. + """ + norms = torch.norm(quaternions[..., 1:], p=2, dim=-1, keepdim=True) + half_angles = torch.atan2(norms, quaternions[..., :1]) + angles = 2 * half_angles + eps = 1e-6 + small_angles = angles.abs() < eps + sin_half_angles_over_angles = torch.empty_like(angles) + sin_half_angles_over_angles[~small_angles] = ( + torch.sin(half_angles[~small_angles]) / angles[~small_angles] + ) + # for x small, sin(x/2) is about x/2 - (x/2)^3/6 + # so sin(x/2)/x is about 1/2 - (x*x)/48 + sin_half_angles_over_angles[small_angles] = ( + 0.5 - (angles[small_angles] * angles[small_angles]) / 48 + ) + return quaternions[..., 1:] / sin_half_angles_over_angles + + +def rotation_6d_to_matrix(d6: torch.Tensor) -> torch.Tensor: + """ + Converts 6D rotation representation by Zhou et al. [1] to rotation matrix + using Gram--Schmidt orthogonalisation per Section B of [1]. + Args: + d6: 6D rotation representation, of size (*, 6) + + Returns: + batch of rotation matrices of size (*, 3, 3) + + [1] Zhou, Y., Barnes, C., Lu, J., Yang, J., & Li, H. + On the Continuity of Rotation Representations in Neural Networks. + IEEE Conference on Computer Vision and Pattern Recognition, 2019. + Retrieved from http://arxiv.org/abs/1812.07035 + """ + + a1, a2 = d6[..., :3], d6[..., 3:] + b1 = F.normalize(a1, dim=-1) + b2 = a2 - (b1 * a2).sum(-1, keepdim=True) * b1 + b2 = F.normalize(b2, dim=-1) + b3 = torch.cross(b1, b2, dim=-1) + return torch.stack((b1, b2, b3), dim=-2) + + +def matrix_to_rotation_6d(matrix: torch.Tensor) -> torch.Tensor: + """ + Converts rotation matrices to 6D rotation representation by Zhou et al. [1] + by dropping the last row. Note that 6D representation is not unique. + Args: + matrix: batch of rotation matrices of size (*, 3, 3) + + Returns: + 6D rotation representation, of size (*, 6) + + [1] Zhou, Y., Barnes, C., Lu, J., Yang, J., & Li, H. + On the Continuity of Rotation Representations in Neural Networks. + IEEE Conference on Computer Vision and Pattern Recognition, 2019. 
+ Retrieved from http://arxiv.org/abs/1812.07035 + """ + return matrix[..., :2, :].clone().reshape(*matrix.size()[:-2], 6) diff --git a/data_utils/utils.py b/data_utils/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..e99ae12d207c11ab76206274debdd67d1b01611e --- /dev/null +++ b/data_utils/utils.py @@ -0,0 +1,333 @@ +import numpy as np +# import librosa #has to do this cause librosa is not supported on my server +import python_speech_features +from scipy.io import wavfile +from scipy import signal +import librosa +import torch +import torchaudio as ta +import torchaudio.functional as ta_F +import torchaudio.transforms as ta_T +# import pyloudnorm as pyln + + +def load_wav_old(audio_fn, sr = 16000): + sample_rate, sig = wavfile.read(audio_fn) + if sample_rate != sr: + result = int((sig.shape[0]) / sample_rate * sr) + x_resampled = signal.resample(sig, result) + x_resampled = x_resampled.astype(np.float64) + return x_resampled, sr + + sig = sig / (2**15) + return sig, sample_rate + + +def get_mfcc(audio_fn, eps=1e-6, fps=25, smlpx=False, sr=16000, n_mfcc=64, win_size=None): + + y, sr = librosa.load(audio_fn, sr=sr, mono=True) + + if win_size is None: + hop_len=int(sr / fps) + else: + hop_len=int(sr / win_size) + + n_fft=2048 + + C = librosa.feature.mfcc( + y = y, + sr = sr, + n_mfcc = n_mfcc, + hop_length = hop_len, + n_fft = n_fft + ) + + if C.shape[0] == n_mfcc: + C = C.transpose(1, 0) + + return C + + +def get_melspec(audio_fn, eps=1e-6, fps = 25, sr=16000, n_mels=64): + raise NotImplementedError + ''' + # y, sr = load_wav(audio_fn=audio_fn, sr=sr) + + # hop_len = int(sr / fps) + # n_fft = 2048 + + # C = librosa.feature.melspectrogram( + # y = y, + # sr = sr, + # n_fft=n_fft, + # hop_length=hop_len, + # n_mels = n_mels, + # fmin=0, + # fmax=8000) + + + # mask = (C == 0).astype(np.float) + # C = mask * eps + (1-mask) * C + + # C = np.log(C) + # #wierd error may occur here + # assert not (np.isnan(C).any()), audio_fn + # if C.shape[0] == n_mels: + # C = C.transpose(1, 0) + + # return C + ''' + +def extract_mfcc(audio,sample_rate=16000): + mfcc = zip(*python_speech_features.mfcc(audio,sample_rate, numcep=64, nfilt=64, nfft=2048, winstep=0.04)) + mfcc = np.stack([np.array(i) for i in mfcc]) + return mfcc + +def get_mfcc_psf(audio_fn, eps=1e-6, fps=25, smlpx=False, sr=16000, n_mfcc=64, win_size=None): + y, sr = load_wav_old(audio_fn, sr=sr) + + if y.shape.__len__() > 1: + y = (y[:,0]+y[:,1])/2 + + if win_size is None: + hop_len=int(sr / fps) + else: + hop_len=int(sr/ win_size) + + n_fft=2048 + + #hard coded for 25 fps + if not smlpx: + C = python_speech_features.mfcc(y, sr, numcep=n_mfcc, nfilt=n_mfcc, nfft=n_fft, winstep=0.04) + else: + C = python_speech_features.mfcc(y, sr, numcep=n_mfcc, nfilt=n_mfcc, nfft=n_fft, winstep=1.01/15) + # if C.shape[0] == n_mfcc: + # C = C.transpose(1, 0) + + return C + + +def get_mfcc_psf_min(audio_fn, eps=1e-6, fps=25, smlpx=False, sr=16000, n_mfcc=64, win_size=None): + y, sr = load_wav_old(audio_fn, sr=sr) + + if y.shape.__len__() > 1: + y = (y[:, 0] + y[:, 1]) / 2 + n_fft = 2048 + + slice_len = 22000 * 5 + slice = y.size // slice_len + + C = [] + + for i in range(slice): + if i != (slice - 1): + feat = python_speech_features.mfcc(y[i*slice_len:(i+1)*slice_len], sr, numcep=n_mfcc, nfilt=n_mfcc, nfft=n_fft, winstep=1.01 / 15) + else: + feat = python_speech_features.mfcc(y[i * slice_len:], sr, numcep=n_mfcc, nfilt=n_mfcc, nfft=n_fft, winstep=1.01 / 15) + + C.append(feat) + + return C + + +def audio_chunking(audio: torch.Tensor, 
frame_rate: int = 30, chunk_size: int = 16000): + """ + :param audio: 1 x T tensor containing a 16kHz audio signal + :param frame_rate: frame rate for video (we need one audio chunk per video frame) + :param chunk_size: number of audio samples per chunk + :return: num_chunks x chunk_size tensor containing sliced audio + """ + samples_per_frame = chunk_size // frame_rate + padding = (chunk_size - samples_per_frame) // 2 + audio = torch.nn.functional.pad(audio.unsqueeze(0), pad=[padding, padding]).squeeze(0) + anchor_points = list(range(chunk_size//2, audio.shape[-1]-chunk_size//2, samples_per_frame)) + audio = torch.cat([audio[:, i-chunk_size//2:i+chunk_size//2] for i in anchor_points], dim=0) + return audio + + +def get_mfcc_ta(audio_fn, eps=1e-6, fps=15, smlpx=False, sr=16000, n_mfcc=64, win_size=None, type='mfcc', am=None, am_sr=None, encoder_choice='mfcc'): + if am is None: + sr_0, audio = audio_fn + audio = torch.tensor(audio)/32767 + if len(audio.shape) == 1: + audio.unsqueeze_(dim=0) + elif audio.shape[1] == 1 or audio.shape[1] == 2: + audio.transpose_(0, 1) + + if sr != sr_0: + audio = ta.transforms.Resample(sr_0, sr)(audio) + if audio.shape[0] > 1: + audio = torch.mean(audio, dim=0, keepdim=True) + + n_fft = 2048 + if fps == 15: + hop_length = 1467 + elif fps == 30: + hop_length = 734 + win_length = hop_length * 2 + n_mels = 256 + n_mfcc = 64 + + if type == 'mfcc': + mfcc_transform = ta_T.MFCC( + sample_rate=sr, + n_mfcc=n_mfcc, + melkwargs={ + "n_fft": n_fft, + "n_mels": n_mels, + # "win_length": win_length, + "hop_length": hop_length, + "mel_scale": "htk", + }, + ) + audio_ft = mfcc_transform(audio).squeeze(dim=0).transpose(0,1).numpy() + elif type == 'mel': + # audio = 0.01 * audio / torch.mean(torch.abs(audio)) + mel_transform = ta_T.MelSpectrogram( + sample_rate=sr, n_fft=n_fft, win_length=None, hop_length=hop_length, n_mels=n_mels + ) + audio_ft = mel_transform(audio).squeeze(0).transpose(0,1).numpy() + # audio_ft = torch.log(audio_ft.clamp(min=1e-10, max=None)).transpose(0,1).numpy() + elif type == 'mel_mul': + audio = 0.01 * audio / torch.mean(torch.abs(audio)) + audio = audio_chunking(audio, frame_rate=fps, chunk_size=sr) + mel_transform = ta_T.MelSpectrogram( + sample_rate=sr, n_fft=n_fft, win_length=int(sr/20), hop_length=int(sr/100), n_mels=n_mels + ) + audio_ft = mel_transform(audio).squeeze(1) + audio_ft = torch.log(audio_ft.clamp(min=1e-10, max=None)).numpy() + else: + sampling_rate, speech_array = audio_fn + speech_array = torch.tensor(speech_array) / 32767 + if len(speech_array.shape) == 1: + speech_array.unsqueeze_(0) + elif speech_array.shape[1] == 1 or speech_array.shape[1] == 2: + speech_array.transpose_(0, 1) + if sr != sampling_rate: + speech_array = ta.transforms.Resample(sampling_rate, sr)(speech_array) + speech_array = torch.mean(speech_array, dim=0, keepdim=True) + speech_array = speech_array.numpy() + + if encoder_choice == 'faceformer': + # audio_ft = np.squeeze(am(speech_array, sampling_rate=16000).input_values).reshape(-1, 1) + audio_ft = speech_array.reshape(-1, 1) + elif encoder_choice == 'meshtalk': + audio_ft = 0.01 * speech_array / np.mean(np.abs(speech_array)) + elif encoder_choice == 'onset': + audio_ft = librosa.onset.onset_detect(y=speech_array, sr=16000, units='time').reshape(-1, 1) + else: + audio, sr_0 = ta.load(audio_fn) + if sr != sr_0: + audio = ta.transforms.Resample(sr_0, sr)(audio) + if audio.shape[0] > 1: + audio = torch.mean(audio, dim=0, keepdim=True) + + n_fft = 2048 + if fps == 15: + hop_length = 1467 + elif fps == 30: + 
hop_length = 734 + win_length = hop_length * 2 + n_mels = 256 + n_mfcc = 64 + + mfcc_transform = ta_T.MFCC( + sample_rate=sr, + n_mfcc=n_mfcc, + melkwargs={ + "n_fft": n_fft, + "n_mels": n_mels, + # "win_length": win_length, + "hop_length": hop_length, + "mel_scale": "htk", + }, + ) + audio_ft = mfcc_transform(audio).squeeze(dim=0).transpose(0, 1).numpy() + return audio_ft + + +def get_mfcc_sepa(audio_fn, fps=15, sr=16000): + audio, sr_0 = ta.load(audio_fn) + if sr != sr_0: + audio = ta.transforms.Resample(sr_0, sr)(audio) + if audio.shape[0] > 1: + audio = torch.mean(audio, dim=0, keepdim=True) + + n_fft = 2048 + if fps == 15: + hop_length = 1467 + elif fps == 30: + hop_length = 734 + n_mels = 256 + n_mfcc = 64 + + mfcc_transform = ta_T.MFCC( + sample_rate=sr, + n_mfcc=n_mfcc, + melkwargs={ + "n_fft": n_fft, + "n_mels": n_mels, + # "win_length": win_length, + "hop_length": hop_length, + "mel_scale": "htk", + }, + ) + audio_ft_0 = mfcc_transform(audio[0, :sr*2]).squeeze(dim=0).transpose(0,1).numpy() + audio_ft_1 = mfcc_transform(audio[0, sr*2:]).squeeze(dim=0).transpose(0,1).numpy() + audio_ft = np.concatenate((audio_ft_0, audio_ft_1), axis=0) + return audio_ft, audio_ft_0.shape[0] + + +def get_mfcc_old(wav_file): + sig, sample_rate = load_wav_old(wav_file) + mfcc = extract_mfcc(sig) + return mfcc + + +def smooth_geom(geom, mask: torch.Tensor = None, filter_size: int = 9, sigma: float = 2.0): + """ + :param geom: T x V x 3 tensor containing a temporal sequence of length T with V vertices in each frame + :param mask: V-dimensional Tensor containing a mask with vertices to be smoothed + :param filter_size: size of the Gaussian filter + :param sigma: standard deviation of the Gaussian filter + :return: T x V x 3 tensor containing smoothed geometry (i.e., smoothed in the area indicated by the mask) + """ + assert filter_size % 2 == 1, f"filter size must be odd but is {filter_size}" + # Gaussian smoothing (low-pass filtering) + fltr = np.arange(-(filter_size // 2), filter_size // 2 + 1) + fltr = np.exp(-0.5 * fltr ** 2 / sigma ** 2) + fltr = torch.Tensor(fltr) / np.sum(fltr) + # apply fltr + fltr = fltr.view(1, 1, -1).to(device=geom.device) + T, V = geom.shape[1], geom.shape[2] + g = torch.nn.functional.pad( + geom.permute(2, 0, 1).view(V, 1, T), + pad=[filter_size // 2, filter_size // 2], mode='replicate' + ) + g = torch.nn.functional.conv1d(g, fltr).view(V, 1, T) + smoothed = g.permute(1, 2, 0).contiguous() + # blend smoothed signal with original signal + if mask is None: + return smoothed + else: + return smoothed * mask[None, :, None] + geom * (-mask[None, :, None] + 1) + +if __name__ == '__main__': + audio_fn = '../sample_audio/clip000028_tCAkv4ggPgI.wav' + + C = get_mfcc_psf(audio_fn) + print(C.shape) + + C_2 = get_mfcc_librosa(audio_fn) + print(C.shape) + + print(C) + print(C_2) + print((C == C_2).all()) + # print(y.shape, sr) + # mel_spec = get_melspec(audio_fn) + # print(mel_spec.shape) + # mfcc = get_mfcc(audio_fn, sr = 16000) + # print(mfcc.shape) + # print(mel_spec.max(), mel_spec.min()) + # print(mfcc.max(), mfcc.min()) \ No newline at end of file diff --git a/demo/1st-page/1st-page-upper.mp4 b/demo/1st-page/1st-page-upper.mp4 new file mode 100644 index 0000000000000000000000000000000000000000..f1c4f843fbfcdb4264a1f9be4c5ee3e021054748 Binary files /dev/null and b/demo/1st-page/1st-page-upper.mp4 differ diff --git a/demo/1st-page/1st-page-upper.npy b/demo/1st-page/1st-page-upper.npy new file mode 100644 index 
0000000000000000000000000000000000000000..ddefe91a468cac9fd3af59fde27bd1752553027a --- /dev/null +++ b/demo/1st-page/1st-page-upper.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:159eefc300544ea95d919b49707afa466e6246135da9a986b4abbc55bbc54850 +size 407168 diff --git a/demo/french/french.mp4 b/demo/french/french.mp4 new file mode 100644 index 0000000000000000000000000000000000000000..6a0295f696c0775c3df26ec3fdcca7ea3c7f62a8 Binary files /dev/null and b/demo/french/french.mp4 differ diff --git a/demo/french/french.npy b/demo/french/french.npy new file mode 100644 index 0000000000000000000000000000000000000000..1e6682a87775d766ac12ccdd34cb407a00d0ed26 --- /dev/null +++ b/demo/french/french.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:289d7a2abb18efa495587a4c4b094a109bdb7d3efd779800f028708bde4d1477 +size 305408 diff --git a/demo/rich/rich.mp4 b/demo/rich/rich.mp4 new file mode 100644 index 0000000000000000000000000000000000000000..5ed32fed65de51cb4f60ce72e560232198b2902d --- /dev/null +++ b/demo/rich/rich.mp4 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8bc50b66c7df10233191921a6a3f19c2895249997206f30e5e099cc10b90903a +size 3608757 diff --git a/demo/rich/rich.npy b/demo/rich/rich.npy new file mode 100644 index 0000000000000000000000000000000000000000..22179046cb02da9ee0f9348d05cf3c8e1cc5bbf1 --- /dev/null +++ b/demo/rich/rich.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d03c956ed3992980fe37581019ec12350531489b12b46a55cfc4c562f7bd8ddb +size 1908128 diff --git a/demo/song/cut.mp4 b/demo/song/cut.mp4 new file mode 100644 index 0000000000000000000000000000000000000000..1505706b20bec3c98ee1af611ecf8fa0ed37fcaf Binary files /dev/null and b/demo/song/cut.mp4 differ diff --git a/demo/song/song.mp4 b/demo/song/song.mp4 new file mode 100644 index 0000000000000000000000000000000000000000..ac70c20ef365ab82123cc11609d1d71ddb2e84ed --- /dev/null +++ b/demo/song/song.mp4 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8827d6daaec213bee7bd32af68a0cf8ea83d154f32d006bd7f38120e2c282045 +size 3178290 diff --git a/demo/song/song.npy b/demo/song/song.npy new file mode 100644 index 0000000000000000000000000000000000000000..b64d21f559526237a1254619d90cda039083cb39 --- /dev/null +++ b/demo/song/song.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:157bfbde5a1b15ac812e52d8b08997be1a41fae93b3a7fe613b897d1ff5d8996 +size 1707788 diff --git a/demo/style/chemistry.mp4 b/demo/style/chemistry.mp4 new file mode 100644 index 0000000000000000000000000000000000000000..e7bd2500494ce9d05074d988b59799bad5f23206 Binary files /dev/null and b/demo/style/chemistry.mp4 differ diff --git a/demo/style/chemistry.npy b/demo/style/chemistry.npy new file mode 100644 index 0000000000000000000000000000000000000000..f0db25ab27a7d05d0f8ddeddc8aca326e924d1bd --- /dev/null +++ b/demo/style/chemistry.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5a8dc42938343bc10b149a6a74d43d5a4cef010c6f2a0c58bffee7f48b2a1e81 +size 318128 diff --git a/demo/style/conan.mp4 b/demo/style/conan.mp4 new file mode 100644 index 0000000000000000000000000000000000000000..d1d3faeb054e2a84124d4b7e006caa5ad5cf09b4 Binary files /dev/null and b/demo/style/conan.mp4 differ diff --git a/demo/style/conan.npy b/demo/style/conan.npy new file mode 100644 index 0000000000000000000000000000000000000000..28663c462543a715d9b2d7d002641f202e169215 --- /dev/null +++ b/demo/style/conan.npy @@ -0,0 
+1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:350ca76806d86ff7b36fbfeaec219d7c0cf515c3c23dfe6791143b82e7ec3327 +size 318128 diff --git a/demo/style/diversity.mp4 b/demo/style/diversity.mp4 new file mode 100644 index 0000000000000000000000000000000000000000..1b799f259dcb42dc833ca73199542fab059dc2f9 --- /dev/null +++ b/demo/style/diversity.mp4 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:09fd9e6330ced1ecbf10a6e7e0a4f6ebad098eb44115a2ee35a070d02e522ec8 +size 5882474 diff --git a/demo/style/diversity.npy b/demo/style/diversity.npy new file mode 100644 index 0000000000000000000000000000000000000000..d93f46319a77f1e892b473915aef80b1180d9c12 --- /dev/null +++ b/demo/style/diversity.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4e4c37f510943dad934da97a8eade5ddce25165df20419e74606fb0160b4ce07 +size 3816128 diff --git a/demo/style/face.mp4 b/demo/style/face.mp4 new file mode 100644 index 0000000000000000000000000000000000000000..6bccfbb24aab7b8cf063b5080cd63e7e38b1a7f2 Binary files /dev/null and b/demo/style/face.mp4 differ diff --git a/demo/style/face.npy b/demo/style/face.npy new file mode 100644 index 0000000000000000000000000000000000000000..6c9c8234caa8676de76f18dab28acda7fe5eef17 --- /dev/null +++ b/demo/style/face.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b51d0d309e92449323ab481a3cc16c88d2b04f6f487eb366720a9ad7f8754f03 +size 318128 diff --git a/demo/style/oliver.mp4 b/demo/style/oliver.mp4 new file mode 100644 index 0000000000000000000000000000000000000000..a1b8b6edb19aeefb93569c714b9e90d518841886 Binary files /dev/null and b/demo/style/oliver.mp4 differ diff --git a/demo/style/oliver.npy b/demo/style/oliver.npy new file mode 100644 index 0000000000000000000000000000000000000000..011341e206f9926ff3785b9b46d17423de0bd909 --- /dev/null +++ b/demo/style/oliver.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:149258f227975e1f07b449f0ab5e4c3e3e1458f97fa646360eac3f1428c52f5a +size 318128 diff --git a/demo/style/seth.mp4 b/demo/style/seth.mp4 new file mode 100644 index 0000000000000000000000000000000000000000..087f036b54e172693f5b8819773a5a87c464832b Binary files /dev/null and b/demo/style/seth.mp4 differ diff --git a/demo/style/seth.npy b/demo/style/seth.npy new file mode 100644 index 0000000000000000000000000000000000000000..b5553e566ce8a735eb33a1524571cba8fc4419fc --- /dev/null +++ b/demo/style/seth.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6132a40d60ee8cf954d74293ea46e75cd4b4d2001ca96fc4713abe06a34b5a3c +size 318128 diff --git a/demo_audio/1st-page.wav b/demo_audio/1st-page.wav new file mode 100644 index 0000000000000000000000000000000000000000..973728edd9c4e0bb2825885501988535aab34a18 Binary files /dev/null and b/demo_audio/1st-page.wav differ diff --git a/demo_audio/french.wav b/demo_audio/french.wav new file mode 100644 index 0000000000000000000000000000000000000000..b1070bf37e49c080d9e6bb0449c87d50dc2e54a4 Binary files /dev/null and b/demo_audio/french.wav differ diff --git a/demo_audio/rich.wav b/demo_audio/rich.wav new file mode 100644 index 0000000000000000000000000000000000000000..d6eef9ae75b7402642e3e22b2be089c908d32b8e --- /dev/null +++ b/demo_audio/rich.wav @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:db9c793b66a64ffb11f0f673e70f9e0188bfa1ce95a391cb9af7d9c7ccf92597 +size 10584078 diff --git a/demo_audio/rich_short.wav b/demo_audio/rich_short.wav new file mode 100644 index 
0000000000000000000000000000000000000000..b760315b9ba6119c2da4cb063fec016c85a5ff5d --- /dev/null +++ b/demo_audio/rich_short.wav @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6fdbba0caf07f44f74294b6853ac37e7c53ea7c9ec1aace773744c3478effa5c +size 2293838 diff --git a/demo_audio/song.wav b/demo_audio/song.wav new file mode 100644 index 0000000000000000000000000000000000000000..c5dee67755aa85febc9bfc6158706846e9305d27 --- /dev/null +++ b/demo_audio/song.wav @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c5e09f549bf5bb8d46492cbaa729f88ef556ddcee7fb497db2faa81bf45e7425 +size 9475620 diff --git a/demo_audio/style.wav b/demo_audio/style.wav new file mode 100644 index 0000000000000000000000000000000000000000..d27754760ee378e133e92b9329e2ba4397353939 Binary files /dev/null and b/demo_audio/style.wav differ diff --git a/evaluation/FGD.py b/evaluation/FGD.py new file mode 100644 index 0000000000000000000000000000000000000000..d5521ee30b8751ef3bdd980ee91231ab78fea6e5 --- /dev/null +++ b/evaluation/FGD.py @@ -0,0 +1,199 @@ +import time + +import numpy as np +import torch +import torch.nn.functional as F +from scipy import linalg +import math +from data_utils.rotation_conversion import axis_angle_to_matrix, matrix_to_rotation_6d + +import warnings +warnings.filterwarnings("ignore", category=RuntimeWarning) # ignore warnings + + +change_angle = torch.tensor([6.0181e-05, 5.1597e-05, 2.1344e-04, 2.1899e-04]) +class EmbeddingSpaceEvaluator: + def __init__(self, ae, vae, device): + + # init embed net + self.ae = ae + # self.vae = vae + + # storage + self.real_feat_list = [] + self.generated_feat_list = [] + self.real_joints_list = [] + self.generated_joints_list = [] + self.real_6d_list = [] + self.generated_6d_list = [] + self.audio_beat_list = [] + + def reset(self): + self.real_feat_list = [] + self.generated_feat_list = [] + + def get_no_of_samples(self): + return len(self.real_feat_list) + + def push_samples(self, generated_poses, real_poses): + # self.net.eval() + # convert poses to latent features + real_feat, real_poses = self.ae.extract(real_poses) + generated_feat, generated_poses = self.ae.extract(generated_poses) + + num_joints = real_poses.shape[2] // 3 + + real_feat = real_feat.squeeze() + generated_feat = generated_feat.reshape(generated_feat.shape[0]*generated_feat.shape[1], -1) + + self.real_feat_list.append(real_feat.data.cpu().numpy()) + self.generated_feat_list.append(generated_feat.data.cpu().numpy()) + + # real_poses = matrix_to_rotation_6d(axis_angle_to_matrix(real_poses.reshape(-1, 3))).reshape(-1, num_joints, 6) + # generated_poses = matrix_to_rotation_6d(axis_angle_to_matrix(generated_poses.reshape(-1, 3))).reshape(-1, num_joints, 6) + # + # self.real_feat_list.append(real_poses.data.cpu().numpy()) + # self.generated_feat_list.append(generated_poses.data.cpu().numpy()) + + def push_joints(self, generated_poses, real_poses): + self.real_joints_list.append(real_poses.data.cpu()) + self.generated_joints_list.append(generated_poses.squeeze().data.cpu()) + + def push_aud(self, aud): + self.audio_beat_list.append(aud.squeeze().data.cpu()) + + def get_MAAC(self): + ang_vel_list = [] + for real_joints in self.real_joints_list: + real_joints[:, 15:21] = real_joints[:, 16:22] + vec = real_joints[:, 15:21] - real_joints[:, 13:19] + inner_product = torch.einsum('kij,kij->ki', [vec[:, 2:], vec[:, :-2]]) + inner_product = torch.clamp(inner_product, -1, 1, out=None) + angle = torch.acos(inner_product) / math.pi + ang_vel = (angle[1:] - 
angle[:-1]).abs().mean(dim=0) + ang_vel_list.append(ang_vel.unsqueeze(dim=0)) + all_vel = torch.cat(ang_vel_list, dim=0) + MAAC = all_vel.mean(dim=0) + return MAAC + + def get_BCscore(self): + thres = 0.01 + sigma = 0.1 + sum_1 = 0 + total_beat = 0 + for joints, audio_beat_time in zip(self.generated_joints_list, self.audio_beat_list): + motion_beat_time = [] + if joints.dim() == 4: + joints = joints[0] + joints[:, 15:21] = joints[:, 16:22] + vec = joints[:, 15:21] - joints[:, 13:19] + inner_product = torch.einsum('kij,kij->ki', [vec[:, 2:], vec[:, :-2]]) + inner_product = torch.clamp(inner_product, -1, 1, out=None) + angle = torch.acos(inner_product) / math.pi + ang_vel = (angle[1:] - angle[:-1]).abs() / change_angle / len(change_angle) + + angle_diff = torch.cat((torch.zeros(1, 4), ang_vel), dim=0) + + sum_2 = 0 + for i in range(angle_diff.shape[1]): + motion_beat_time = [] + for t in range(1, joints.shape[0]-1): + if (angle_diff[t][i] < angle_diff[t - 1][i] and angle_diff[t][i] < angle_diff[t + 1][i]): + if (angle_diff[t - 1][i] - angle_diff[t][i] >= thres or angle_diff[t + 1][i] - angle_diff[ + t][i] >= thres): + motion_beat_time.append(float(t) / 30.0) + if (len(motion_beat_time) == 0): + continue + motion_beat_time = torch.tensor(motion_beat_time) + sum = 0 + for audio in audio_beat_time: + sum += np.power(math.e, -(np.power((audio.item() - motion_beat_time), 2)).min() / (2 * sigma * sigma)) + sum_2 = sum_2 + sum + total_beat = total_beat + len(audio_beat_time) + sum_1 = sum_1 + sum_2 + return sum_1/total_beat + + + def get_scores(self): + generated_feats = np.vstack(self.generated_feat_list) + real_feats = np.vstack(self.real_feat_list) + + def frechet_distance(samples_A, samples_B): + A_mu = np.mean(samples_A, axis=0) + A_sigma = np.cov(samples_A, rowvar=False) + B_mu = np.mean(samples_B, axis=0) + B_sigma = np.cov(samples_B, rowvar=False) + try: + frechet_dist = self.calculate_frechet_distance(A_mu, A_sigma, B_mu, B_sigma) + except ValueError: + frechet_dist = 1e+10 + return frechet_dist + + #################################################################### + # frechet distance + frechet_dist = frechet_distance(generated_feats, real_feats) + + #################################################################### + # distance between real and generated samples on the latent feature space + dists = [] + for i in range(real_feats.shape[0]): + d = np.sum(np.absolute(real_feats[i] - generated_feats[i])) # MAE + dists.append(d) + feat_dist = np.mean(dists) + + return frechet_dist, feat_dist + + @staticmethod + def calculate_frechet_distance(mu1, sigma1, mu2, sigma2, eps=1e-6): + """ from https://github.com/mseitzer/pytorch-fid/blob/master/fid_score.py """ + """Numpy implementation of the Frechet Distance. + The Frechet distance between two multivariate Gaussians X_1 ~ N(mu_1, C_1) + and X_2 ~ N(mu_2, C_2) is + d^2 = ||mu_1 - mu_2||^2 + Tr(C_1 + C_2 - 2*sqrt(C_1*C_2)). + Stable version by Dougal J. Sutherland. + Params: + -- mu1 : Numpy array containing the activations of a layer of the + inception net (like returned by the function 'get_predictions') + for generated samples. + -- mu2 : The sample mean over activations, precalculated on an + representative data set. + -- sigma1: The covariance matrix over activations for generated samples. + -- sigma2: The covariance matrix over activations, precalculated on an + representative data set. + Returns: + -- : The Frechet Distance. 
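+        Typical use in this repo (a sketch): push batches with
+        evaluator.push_samples(generated_poses, real_poses), then read
+        frechet_dist, feat_dist = evaluator.get_scores(), which calls this
+        helper on the accumulated latent features.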
+ """ + + mu1 = np.atleast_1d(mu1) + mu2 = np.atleast_1d(mu2) + + sigma1 = np.atleast_2d(sigma1) + sigma2 = np.atleast_2d(sigma2) + + assert mu1.shape == mu2.shape, \ + 'Training and test mean vectors have different lengths' + assert sigma1.shape == sigma2.shape, \ + 'Training and test covariances have different dimensions' + + diff = mu1 - mu2 + + # Product might be almost singular + covmean, _ = linalg.sqrtm(sigma1.dot(sigma2), disp=False) + if not np.isfinite(covmean).all(): + msg = ('fid calculation produces singular product; ' + 'adding %s to diagonal of cov estimates') % eps + print(msg) + offset = np.eye(sigma1.shape[0]) * eps + covmean = linalg.sqrtm((sigma1 + offset).dot(sigma2 + offset)) + + # Numerical error might give slight imaginary component + if np.iscomplexobj(covmean): + if not np.allclose(np.diagonal(covmean).imag, 0, atol=1e-3): + m = np.max(np.abs(covmean.imag)) + raise ValueError('Imaginary component {}'.format(m)) + covmean = covmean.real + + tr_covmean = np.trace(covmean) + + return (diff.dot(diff) + np.trace(sigma1) + + np.trace(sigma2) - 2 * tr_covmean) \ No newline at end of file diff --git a/evaluation/__init__.py b/evaluation/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/evaluation/__pycache__/__init__.cpython-37.pyc b/evaluation/__pycache__/__init__.cpython-37.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c1631bfe6e7bc4b06edead82accfc11190e66830 Binary files /dev/null and b/evaluation/__pycache__/__init__.cpython-37.pyc differ diff --git a/evaluation/__pycache__/metrics.cpython-37.pyc b/evaluation/__pycache__/metrics.cpython-37.pyc new file mode 100644 index 0000000000000000000000000000000000000000..cc7c877f5fd731130c82d1b67a180ac9b69cc251 Binary files /dev/null and b/evaluation/__pycache__/metrics.cpython-37.pyc differ diff --git a/evaluation/diversity_LVD.py b/evaluation/diversity_LVD.py new file mode 100644 index 0000000000000000000000000000000000000000..cfd7dd81118692ea0c361c6016afa29d98665315 --- /dev/null +++ b/evaluation/diversity_LVD.py @@ -0,0 +1,64 @@ +''' +LVD: different initial pose +diversity: same initial pose +''' +import os +import sys +sys.path.append(os.getcwd()) + +from glob import glob + +from argparse import ArgumentParser +import json + +from evaluation.util import * +from evaluation.metrics import * +from tqdm import tqdm + +parser = ArgumentParser() +parser.add_argument('--speaker', required=True, type=str) +parser.add_argument('--post_fix', nargs='+', default=['base'], type=str) +args = parser.parse_args() + +speaker = args.speaker +test_audios = sorted(glob('pose_dataset/videos/test_audios/%s/*.wav'%(speaker))) + +LVD_list = [] +diversity_list = [] + +for aud in tqdm(test_audios): + base_name = os.path.splitext(aud)[0] + gt_path = get_full_path(aud, speaker, 'val') + _, gt_poses, _ = get_gts(gt_path) + gt_poses = gt_poses[np.newaxis,...] 
+ # print(gt_poses.shape)#(seq_len, 135*2)pose, lhand, rhand, face + for post_fix in args.post_fix: + pred_path = base_name + '_'+post_fix+'.json' + pred_poses = np.array(json.load(open(pred_path))) + # print(pred_poses.shape)#(B, seq_len, 108) + pred_poses = cvt25(pred_poses, gt_poses) + # print(pred_poses.shape)#(B, seq, pose_dim) + + gt_valid_points = hand_points(gt_poses) + pred_valid_points = hand_points(pred_poses) + + lvd = LVD(gt_valid_points, pred_valid_points) + # div = diversity(pred_valid_points) + + LVD_list.append(lvd) + # diversity_list.append(div) + + # gt_velocity = peak_velocity(gt_valid_points, order=2) + # pred_velocity = peak_velocity(pred_valid_points, order=2) + + # gt_consistency = velocity_consistency(gt_velocity, pred_velocity) + # pred_consistency = velocity_consistency(pred_velocity, gt_velocity) + + # gt_consistency_list.append(gt_consistency) + # pred_consistency_list.append(pred_consistency) + +lvd = np.mean(LVD_list) +# diversity_list = np.mean(diversity_list) + +print('LVD:', lvd) +# print("diversity:", diversity_list) \ No newline at end of file diff --git a/evaluation/get_quality_samples.py b/evaluation/get_quality_samples.py new file mode 100644 index 0000000000000000000000000000000000000000..b8ef393cd310aa2e75f871122a62f45dd525e47c --- /dev/null +++ b/evaluation/get_quality_samples.py @@ -0,0 +1,62 @@ +''' +''' +import os +import sys +sys.path.append(os.getcwd()) + +from glob import glob + +from argparse import ArgumentParser +import json + +from evaluation.util import * +from evaluation.metrics import * +from tqdm import tqdm + +parser = ArgumentParser() +parser.add_argument('--speaker', required=True, type=str) +parser.add_argument('--post_fix', nargs='+', default=['paper_model'], type=str) +args = parser.parse_args() + +speaker = args.speaker +test_audios = sorted(glob('pose_dataset/videos/test_audios/%s/*.wav'%(speaker))) + +quality_samples={'gt':[]} +for post_fix in args.post_fix: + quality_samples[post_fix] = [] + +for aud in tqdm(test_audios): + base_name = os.path.splitext(aud)[0] + gt_path = get_full_path(aud, speaker, 'val') + _, gt_poses, _ = get_gts(gt_path) + gt_poses = gt_poses[np.newaxis,...] 
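The prediction files these evaluation scripts read are plain JSON arrays of shape (num_samples, seq_len, 108) — 54 upper-body/hand keypoints times 2 coordinates per frame — stored next to each test wav under pose_dataset/videos/test_audios/<speaker>/ as '<clip>_<post_fix>.json'. A rough sketch of writing one such file; the file name and sizes below are placeholders:

import json
import os
import numpy as np

num_samples, seq_len = 4, 120                         # placeholder sizes
pred = np.zeros((num_samples, seq_len, 108))          # (B, T, 54 keypoints * 2)
wav_path = 'clip_0001.wav'                            # placeholder clip name
json_path = os.path.splitext(wav_path)[0] + '_base.json'   # '<clip>_<post_fix>.json'
with open(json_path, 'w') as f:
    json.dump(pred.tolist(), f)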
+ gt_valid_points = valid_points(gt_poses) + # print(gt_valid_points.shape) + quality_samples['gt'].append(gt_valid_points) + + for post_fix in args.post_fix: + pred_path = base_name + '_'+post_fix+'.json' + pred_poses = np.array(json.load(open(pred_path))) + # print(pred_poses.shape)#(B, seq_len, 108) + pred_poses = cvt25(pred_poses, gt_poses) + # print(pred_poses.shape)#(B, seq, pose_dim) + + pred_valid_points = valid_points(pred_poses)[0:1] + quality_samples[post_fix].append(pred_valid_points) + +quality_samples['gt'] = np.concatenate(quality_samples['gt'], axis=1) +for post_fix in args.post_fix: + quality_samples[post_fix] = np.concatenate(quality_samples[post_fix], axis=1) + +print('gt:', quality_samples['gt'].shape) +quality_samples['gt'] = quality_samples['gt'].tolist() +for post_fix in args.post_fix: + print(post_fix, ':', quality_samples[post_fix].shape) + quality_samples[post_fix] = quality_samples[post_fix].tolist() + +save_dir = '../../experiments/' +os.makedirs(save_dir, exist_ok=True) +save_name = os.path.join(save_dir, 'quality_samples_%s.json'%(speaker)) +with open(save_name, 'w') as f: + json.dump(quality_samples, f) + diff --git a/evaluation/metrics.py b/evaluation/metrics.py new file mode 100644 index 0000000000000000000000000000000000000000..93dc8fde8b8de57cc2f0f7387afd5de7cb753835 --- /dev/null +++ b/evaluation/metrics.py @@ -0,0 +1,109 @@ +''' +Warning: metrics are for reference only, may have limited significance +''' +import os +import sys +sys.path.append(os.getcwd()) +import numpy as np +import torch + +from data_utils.lower_body import rearrange, symmetry +import torch.nn.functional as F + +def data_driven_baselines(gt_kps): + ''' + gt_kps: T, D + ''' + gt_velocity = np.abs(gt_kps[1:] - gt_kps[:-1]) + + mean= np.mean(gt_velocity, axis=0)[np.newaxis] #(1, D) + mean = np.mean(np.abs(gt_velocity-mean)) + last_step = gt_kps[1] - gt_kps[0] + last_step = last_step[np.newaxis] #(1, D) + last_step = np.mean(np.abs(gt_velocity-last_step)) + return last_step, mean + +def Batch_LVD(gt_kps, pr_kps, symmetrical, weight): + if gt_kps.shape[0] > pr_kps.shape[1]: + length = pr_kps.shape[1] + else: + length = gt_kps.shape[0] + gt_kps = gt_kps[:length] + pr_kps = pr_kps[:, :length] + global symmetry + symmetry = torch.tensor(symmetry).bool() + + if symmetrical: + # rearrange for compute symmetric. ns means non-symmetrical joints, ys means symmetrical joints. 
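+ # The block below computes, per joint, the frame-to-frame velocity magnitude
+ # ||x_t - x_{t-1}||_2 and compares predicted against ground-truth velocities.
+ # Left/right joint pairs are grouped side by side; the summed left vs. right
+ # velocity decides which side is currently moving (move_side), and only that
+ # side's velocity is kept for the comparison, while non-symmetrical joints
+ # keep their velocities unchanged. The result aggregates the absolute
+ # velocity differences over joints and frames (scaled by w) and averages
+ # over the batch.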
+ gt_kps = gt_kps[:, rearrange] + ns_gt_kps = gt_kps[:, ~symmetry] + ys_gt_kps = gt_kps[:, symmetry] + ys_gt_kps = ys_gt_kps.reshape(ys_gt_kps.shape[0], -1, 2, 3) + ns_gt_velocity = (ns_gt_kps[1:] - ns_gt_kps[:-1]).norm(p=2, dim=-1) + ys_gt_velocity = (ys_gt_kps[1:] - ys_gt_kps[:-1]).norm(p=2, dim=-1) + left_gt_vel = ys_gt_velocity[:, :, 0].sum(dim=-1) + right_gt_vel = ys_gt_velocity[:, :, 1].sum(dim=-1) + move_side = torch.where(left_gt_vel>right_gt_vel, torch.ones(left_gt_vel.shape).cuda(), torch.zeros(left_gt_vel.shape).cuda()) + ys_gt_velocity = torch.mul(ys_gt_velocity[:, :, 0].transpose(0,1), move_side) + torch.mul(ys_gt_velocity[:, :, 1].transpose(0,1), ~move_side.bool()) + ys_gt_velocity = ys_gt_velocity.transpose(0,1) + gt_velocity = torch.cat([ns_gt_velocity, ys_gt_velocity], dim=1) + + pr_kps = pr_kps[:, :, rearrange] + ns_pr_kps = pr_kps[:, :, ~symmetry] + ys_pr_kps = pr_kps[:, :, symmetry] + ys_pr_kps = ys_pr_kps.reshape(ys_pr_kps.shape[0], ys_pr_kps.shape[1], -1, 2, 3) + ns_pr_velocity = (ns_pr_kps[:, 1:] - ns_pr_kps[:, :-1]).norm(p=2, dim=-1) + ys_pr_velocity = (ys_pr_kps[:, 1:] - ys_pr_kps[:, :-1]).norm(p=2, dim=-1) + left_pr_vel = ys_pr_velocity[:, :, :, 0].sum(dim=-1) + right_pr_vel = ys_pr_velocity[:, :, :, 1].sum(dim=-1) + move_side = torch.where(left_pr_vel > right_pr_vel, torch.ones(left_pr_vel.shape).cuda(), + torch.zeros(left_pr_vel.shape).cuda()) + ys_pr_velocity = torch.mul(ys_pr_velocity[..., 0].permute(2, 0, 1), move_side) + torch.mul( + ys_pr_velocity[..., 1].permute(2, 0, 1), ~move_side.long()) + ys_pr_velocity = ys_pr_velocity.permute(1, 2, 0) + pr_velocity = torch.cat([ns_pr_velocity, ys_pr_velocity], dim=2) + else: + gt_velocity = (gt_kps[1:] - gt_kps[:-1]).norm(p=2, dim=-1) + pr_velocity = (pr_kps[:, 1:] - pr_kps[:, :-1]).norm(p=2, dim=-1) + + if weight: + w = F.softmax(gt_velocity.sum(dim=1).normal_(), dim=0) + else: + w = 1 / gt_velocity.shape[0] + + v_diff = ((pr_velocity - gt_velocity).abs().sum(dim=-1) * w).sum(dim=-1).mean() + + return v_diff + + +def LVD(gt_kps, pr_kps, symmetrical=False, weight=False): + gt_kps = gt_kps.squeeze() + pr_kps = pr_kps.squeeze() + if len(pr_kps.shape) == 4: + return Batch_LVD(gt_kps, pr_kps, symmetrical, weight) + # length = np.minimum(gt_kps.shape[0], pr_kps.shape[0]) + length = gt_kps.shape[0]-10 + # gt_kps = gt_kps[25:length] + # pr_kps = pr_kps[25:length] #(T, D) + # if pr_kps.shape[0] < gt_kps.shape[0]: + # pr_kps = np.pad(pr_kps, [[0, int(gt_kps.shape[0]-pr_kps.shape[0])], [0, 0]], mode='constant') + + gt_velocity = (gt_kps[1:] - gt_kps[:-1]).norm(p=2, dim=-1) + pr_velocity = (pr_kps[1:] - pr_kps[:-1]).norm(p=2, dim=-1) + + return (pr_velocity-gt_velocity).abs().sum(dim=-1).mean() + +def diversity(kps): + ''' + kps: bs, seq, dim + ''' + dis_list = [] + #the distance between each pair + for i in range(kps.shape[0]): + for j in range(i+1, kps.shape[0]): + seq_i = kps[i] + seq_j = kps[j] + + dis = np.mean(np.abs(seq_i - seq_j)) + dis_list.append(dis) + return np.mean(dis_list) diff --git a/evaluation/mode_transition.py b/evaluation/mode_transition.py new file mode 100644 index 0000000000000000000000000000000000000000..92cd0e5ecfe688b7a8add932af0303bd8d5ed947 --- /dev/null +++ b/evaluation/mode_transition.py @@ -0,0 +1,60 @@ +import os +import sys +sys.path.append(os.getcwd()) + +from glob import glob + +from argparse import ArgumentParser +import json + +from evaluation.util import * +from evaluation.metrics import * +from tqdm import tqdm + +parser = ArgumentParser() +parser.add_argument('--speaker', required=True, 
type=str) +parser.add_argument('--post_fix', nargs='+', default=['paper_model'], type=str) +args = parser.parse_args() + +speaker = args.speaker +test_audios = sorted(glob('pose_dataset/videos/test_audios/%s/*.wav'%(speaker))) + +precision_list=[] +recall_list=[] +accuracy_list=[] + +for aud in tqdm(test_audios): + base_name = os.path.splitext(aud)[0] + gt_path = get_full_path(aud, speaker, 'val') + _, gt_poses, _ = get_gts(gt_path) + if gt_poses.shape[0] < 50: + continue + gt_poses = gt_poses[np.newaxis,...] + # print(gt_poses.shape)#(seq_len, 135*2)pose, lhand, rhand, face + for post_fix in args.post_fix: + pred_path = base_name + '_'+post_fix+'.json' + pred_poses = np.array(json.load(open(pred_path))) + # print(pred_poses.shape)#(B, seq_len, 108) + pred_poses = cvt25(pred_poses, gt_poses) + # print(pred_poses.shape)#(B, seq, pose_dim) + + gt_valid_points = valid_points(gt_poses) + pred_valid_points = valid_points(pred_poses) + + # print(gt_valid_points.shape, pred_valid_points.shape) + + gt_mode_transition_seq = mode_transition_seq(gt_valid_points, speaker)#(B, N) + pred_mode_transition_seq = mode_transition_seq(pred_valid_points, speaker)#(B, N) + + # baseline = np.random.randint(0, 2, size=pred_mode_transition_seq.shape) + # pred_mode_transition_seq = baseline + precision, recall, accuracy = mode_transition_consistency(pred_mode_transition_seq, gt_mode_transition_seq) + precision_list.append(precision) + recall_list.append(recall) + accuracy_list.append(accuracy) +print(len(precision_list), len(recall_list), len(accuracy_list)) +precision_list = np.mean(precision_list) +recall_list = np.mean(recall_list) +accuracy_list = np.mean(accuracy_list) + +print('precision, recall, accu:', precision_list, recall_list, accuracy_list) diff --git a/evaluation/peak_velocity.py b/evaluation/peak_velocity.py new file mode 100644 index 0000000000000000000000000000000000000000..3842b918375176099cd60a8f9ede50d8920b3e4a --- /dev/null +++ b/evaluation/peak_velocity.py @@ -0,0 +1,65 @@ +import os +import sys +sys.path.append(os.getcwd()) + +from glob import glob + +from argparse import ArgumentParser +import json + +from evaluation.util import * +from evaluation.metrics import * +from tqdm import tqdm + +parser = ArgumentParser() +parser.add_argument('--speaker', required=True, type=str) +parser.add_argument('--post_fix', nargs='+', default=['paper_model'], type=str) +args = parser.parse_args() + +speaker = args.speaker +test_audios = sorted(glob('pose_dataset/videos/test_audios/%s/*.wav'%(speaker))) + +gt_consistency_list=[] +pred_consistency_list=[] + +for aud in tqdm(test_audios): + base_name = os.path.splitext(aud)[0] + gt_path = get_full_path(aud, speaker, 'val') + _, gt_poses, _ = get_gts(gt_path) + gt_poses = gt_poses[np.newaxis,...] 
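The scripts above all funnel predictions through the helpers added in evaluation/util.py further down in this patch. A small shape check of that pipeline on dummy zero arrays, assuming it is run from the repo root so the module is importable:

import numpy as np
from evaluation.util import cvt25, hand_points, valid_points

gt = np.zeros((1, 120, 270))       # (1, T, 135 keypoints * 2) ground truth
pred = np.zeros((3, 120, 108))     # (B, T, 54 keypoints * 2) generated samples
merged = cvt25(pred, gt)           # predictions pasted into the 135-keypoint layout
print(merged.shape)                # (3, 120, 270)
print(hand_points(merged).shape)   # (3, 120, 98)  arm + hand keypoints
print(valid_points(merged).shape)  # (3, 120, 108) hands plus some head points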
+ # print(gt_poses.shape)#(seq_len, 135*2)pose, lhand, rhand, face + for post_fix in args.post_fix: + pred_path = base_name + '_'+post_fix+'.json' + pred_poses = np.array(json.load(open(pred_path))) + # print(pred_poses.shape)#(B, seq_len, 108) + pred_poses = cvt25(pred_poses, gt_poses) + # print(pred_poses.shape)#(B, seq, pose_dim) + + gt_valid_points = hand_points(gt_poses) + pred_valid_points = hand_points(pred_poses) + + gt_velocity = peak_velocity(gt_valid_points, order=2) + pred_velocity = peak_velocity(pred_valid_points, order=2) + + gt_consistency = velocity_consistency(gt_velocity, pred_velocity) + pred_consistency = velocity_consistency(pred_velocity, gt_velocity) + + gt_consistency_list.append(gt_consistency) + pred_consistency_list.append(pred_consistency) + +gt_consistency_list = np.concatenate(gt_consistency_list) +pred_consistency_list = np.concatenate(pred_consistency_list) + +print(gt_consistency_list.max(), gt_consistency_list.min()) +print(pred_consistency_list.max(), pred_consistency_list.min()) +print(np.mean(gt_consistency_list), np.mean(pred_consistency_list)) +print(np.std(gt_consistency_list), np.std(pred_consistency_list)) + +draw_cdf(gt_consistency_list, save_name='%s_gt.jpg'%(speaker), color='slateblue') +draw_cdf(pred_consistency_list, save_name='%s_pred.jpg'%(speaker), color='lightskyblue') + +to_excel(gt_consistency_list, '%s_gt.xlsx'%(speaker)) +to_excel(pred_consistency_list, '%s_pred.xlsx'%(speaker)) + +np.save('%s_gt.npy'%(speaker), gt_consistency_list) +np.save('%s_pred.npy'%(speaker), pred_consistency_list) \ No newline at end of file diff --git a/evaluation/util.py b/evaluation/util.py new file mode 100644 index 0000000000000000000000000000000000000000..a3c18a0ab89ca812e4a2126bd18ee8e588b7ec43 --- /dev/null +++ b/evaluation/util.py @@ -0,0 +1,148 @@ +import os +from glob import glob +import numpy as np +import json +from matplotlib import pyplot as plt +import pandas as pd +def get_gts(clip): + ''' + clip: abs path to the clip dir + ''' + keypoints_files = sorted(glob(os.path.join(clip, 'keypoints_new/person_1')+'/*.json')) + + upper_body_points = list(np.arange(0, 25)) + poses = [] + confs = [] + neck_to_nose_len = [] + mean_position = [] + for kp_file in keypoints_files: + kp_load = json.load(open(kp_file, 'r'))['people'][0] + posepts = kp_load['pose_keypoints_2d'] + lhandpts = kp_load['hand_left_keypoints_2d'] + rhandpts = kp_load['hand_right_keypoints_2d'] + facepts = kp_load['face_keypoints_2d'] + + neck = np.array(posepts).reshape(-1,3)[1] + nose = np.array(posepts).reshape(-1,3)[0] + x_offset = abs(neck[0]-nose[0]) + y_offset = abs(neck[1]-nose[1]) + neck_to_nose_len.append(y_offset) + mean_position.append([neck[0],neck[1]]) + + keypoints=np.array(posepts+lhandpts+rhandpts+facepts).reshape(-1,3)[:,:2] + + upper_body = keypoints[upper_body_points, :] + hand_points = keypoints[25:, :] + keypoints = np.vstack([upper_body, hand_points]) + + poses.append(keypoints) + + if len(neck_to_nose_len) > 0: + scale_factor = np.mean(neck_to_nose_len) + else: + raise ValueError(clip) + mean_position = np.mean(np.array(mean_position), axis=0) + + unlocalized_poses = np.array(poses).copy() + localized_poses = [] + for i in range(len(poses)): + keypoints = poses[i] + neck = keypoints[1].copy() + + keypoints[:, 0] = (keypoints[:, 0] - neck[0]) / scale_factor + keypoints[:, 1] = (keypoints[:, 1] - neck[1]) / scale_factor + localized_poses.append(keypoints.reshape(-1)) + + localized_poses=np.array(localized_poses) + return unlocalized_poses, localized_poses, 
(scale_factor, mean_position) + +def get_full_path(wav_name, speaker, split): + ''' + get clip path from aud file + ''' + wav_name = os.path.basename(wav_name) + wav_name = os.path.splitext(wav_name)[0] + clip_name, vid_name = wav_name[:10], wav_name[11:] + + full_path = os.path.join('pose_dataset/videos/', speaker, 'clips', vid_name, 'images/half', split, clip_name) + + assert os.path.isdir(full_path), full_path + + return full_path + +def smooth(res): + ''' + res: (B, seq_len, pose_dim) + ''' + window = [res[:, 7, :], res[:, 8, :], res[:, 9, :], res[:, 10, :], res[:, 11, :], res[:, 12, :]] + w_size=7 + for i in range(10, res.shape[1]-3): + window.append(res[:, i+3, :]) + if len(window) > w_size: + window = window[1:] + + if (i%25) in [22, 23, 24, 0, 1, 2, 3]: + res[:, i, :] = np.mean(window, axis=1) + + return res + +def cvt25(pred_poses, gt_poses=None): + ''' + gt_poses: (1, seq_len, 270), 135 *2 + pred_poses: (B, seq_len, 108), 54 * 2 + ''' + if gt_poses is None: + gt_poses = np.zeros_like(pred_poses) + else: + gt_poses = gt_poses.repeat(pred_poses.shape[0], axis=0) + + length = min(pred_poses.shape[1], gt_poses.shape[1]) + pred_poses = pred_poses[:, :length, :] + gt_poses = gt_poses[:, :length, :] + gt_poses = gt_poses.reshape(gt_poses.shape[0], gt_poses.shape[1], -1, 2) + pred_poses = pred_poses.reshape(pred_poses.shape[0], pred_poses.shape[1], -1, 2) + + gt_poses[:, :, [1, 2, 3, 4, 5, 6, 7], :] = pred_poses[:, :, 1:8, :] + gt_poses[:, :, 25:25+21+21, :] = pred_poses[:, :, 12:, :] + + return gt_poses.reshape(gt_poses.shape[0], gt_poses.shape[1], -1) + +def hand_points(seq): + ''' + seq: (B, seq_len, 135*2) + hands only + ''' + hand_idx = [1, 2, 3, 4,5 ,6,7] + list(range(25, 25+21+21)) + seq = seq.reshape(seq.shape[0], seq.shape[1], -1, 2) + return seq[:, :, hand_idx, :].reshape(seq.shape[0], seq.shape[1], -1) + +def valid_points(seq): + ''' + hands with some head points + ''' + valid_idx = [0, 1, 2, 3, 4,5 ,6,7, 8, 9, 10, 11] + list(range(25, 25+21+21)) + seq = seq.reshape(seq.shape[0], seq.shape[1], -1, 2) + + seq = seq[:, :, valid_idx, :].reshape(seq.shape[0], seq.shape[1], -1) + assert seq.shape[-1] == 108, seq.shape + return seq + +def draw_cdf(seq, save_name='cdf.jpg', color='slatebule'): + plt.figure() + plt.hist(seq, bins=100, range=(0, 100), color=color) + plt.savefig(save_name) + +def to_excel(seq, save_name='res.xlsx'): + ''' + seq: (T) + ''' + df = pd.DataFrame(seq) + writer = pd.ExcelWriter(save_name) + df.to_excel(writer, 'sheet1') + writer.save() + writer.close() + + +if __name__ == '__main__': + random_data = np.random.randint(0, 10, 100) + draw_cdf(random_data) \ No newline at end of file diff --git a/experiments/2022-10-15-smplx_S2G-face-3d/smplx_S2G.json b/experiments/2022-10-15-smplx_S2G-face-3d/smplx_S2G.json new file mode 100644 index 0000000000000000000000000000000000000000..fb725cf7dc1bc46a97a543f3aba67a6331adf05c --- /dev/null +++ b/experiments/2022-10-15-smplx_S2G-face-3d/smplx_S2G.json @@ -0,0 +1,85 @@ +{ + "config_root_path": "/is/cluster/scratch/hyi/ExpressiveBody/SMPLifyX4/scripts", + "dataset_load_mode": "json", + "store_file_path": "store.pkl", + "smplx_npz_path": "visualise/smplx_model/SMPLX_NEUTRAL_2020.npz", + "extra_joint_path": "visualise/smplx_model/smplx_extra_joints.yaml", + "j14_regressor_path": "visualise/smplx_model/SMPLX_to_J14.pkl", + "param": { + "w_j": 1, + "w_b": 1, + "w_h": 1 + }, + "Data": { + "data_root": "../../expressive_body-V0.7/", + "pklname": "_3d_ta_wv2_3p.pkl", + "pose": { + "normalization": false, + "convert_to_6d": false, + 
"norm_method": "all", + "augmentation": false, + "generate_length": 88, + "pre_pose_length": 0, + "pose_dim": 99, + "expression": true + }, + "aud": { + "feat_method": "mfcc", + "aud_feat_dim": 64, + "aud_feat_win_size": null, + "context_info": false + } + }, + "Model": { + "model_type": "face", + "model_name": "s2g_face", + "encoder_choice": "faceformer", + "operation_kernel": "rnn", + "interaction": "concat", + "rnn_cell": "gru", + "T_layer_norm": true, + "bidirectional": true, + "residual": true, + "use_template": true, + "template_length": 32, + "gan": false, + "separate": true, + "l1_joints": false, + "radianloss": true + }, + "DataLoader": { + "batch_size": 1, + "num_workers": 0 + }, + "Train": { + "epochs": 100, + "max_gradient_norm": 5, + "recon_input": true, + "learning_rate": { + "generator_learning_rate": 1e-3, + "discriminator_learning_rate": 1e-4 + }, + "weights": { + "kl_tolerance": 0.2, + "velocity_length": 10, + "keypoint_loss_weight": 1, + "recon_input_weight": 1, + "kl_loss_weight": 0.2, + "kl_start_weight": 1e-5, + "kl_decay_rate": 0.9995, + "vel_loss_weight": 1, + "vel_start_weight": 1e-5, + "vel_decay_rate": 0.99995, + "r_loss_weight": 1, + "zero_loss_weight": 0, + "gan_loss_weight": 0.1, + "k": 1.0 + } + }, + "Log": { + "save_every": 50, + "print_every": 1000, + "name": "face-sgd-3p-wv2" + } +} + \ No newline at end of file diff --git a/experiments/2022-10-15-smplx_S2G-face-3d/train.log b/experiments/2022-10-15-smplx_S2G-face-3d/train.log new file mode 100644 index 0000000000000000000000000000000000000000..9afc077395cf7ee77434cee799a05bd704a50b6f --- /dev/null +++ b/experiments/2022-10-15-smplx_S2G-face-3d/train.log @@ -0,0 +1,1274 @@ +2022-10-15 19:52:36,900-341-start_training +2022-10-15 19:52:36,900-344-epoch:0 +2022-10-15 19:54:38,915-282-global_steps:1000,grad:5.5552,MSELoss:0.0300,exp_loss:0.7135 +2022-10-15 19:56:50,220-282-global_steps:2000,grad:4.9700,MSELoss:0.0271,exp_loss:0.7086 +2022-10-15 19:59:00,286-282-global_steps:3000,grad:4.9525,MSELoss:0.0259,exp_loss:0.7014 +2022-10-15 20:01:10,543-282-global_steps:4000,grad:4.9598,MSELoss:0.0251,exp_loss:0.6937 +2022-10-15 20:03:23,056-282-global_steps:5000,grad:4.9347,MSELoss:0.0244,exp_loss:0.6881 +2022-10-15 20:05:30,689-282-global_steps:6000,grad:4.8989,MSELoss:0.0238,exp_loss:0.6826 +2022-10-15 20:07:39,467-282-global_steps:7000,grad:4.8789,MSELoss:0.0233,exp_loss:0.6788 +2022-10-15 20:09:48,589-282-global_steps:8000,grad:4.8330,MSELoss:0.0230,exp_loss:0.6756 +2022-10-15 20:11:51,487-282-global_steps:9000,grad:4.7915,MSELoss:0.0228,exp_loss:0.6735 +2022-10-15 20:13:57,937-282-global_steps:10000,grad:4.7621,MSELoss:0.0225,exp_loss:0.6709 +2022-10-15 20:16:08,850-282-global_steps:11000,grad:4.7329,MSELoss:0.0223,exp_loss:0.6675 +2022-10-15 20:17:42,592-344-epoch:1 +2022-10-15 20:18:16,963-282-global_steps:12000,grad:4.3313,MSELoss:0.0204,exp_loss:0.6319 +2022-10-15 20:20:30,052-282-global_steps:13000,grad:4.4888,MSELoss:0.0196,exp_loss:0.6323 +2022-10-15 20:22:42,722-282-global_steps:14000,grad:4.4369,MSELoss:0.0195,exp_loss:0.6313 +2022-10-15 20:24:55,430-282-global_steps:15000,grad:4.4139,MSELoss:0.0196,exp_loss:0.6288 +2022-10-15 20:27:04,591-282-global_steps:16000,grad:4.4010,MSELoss:0.0195,exp_loss:0.6273 +2022-10-15 20:29:06,276-282-global_steps:17000,grad:4.4339,MSELoss:0.0195,exp_loss:0.6273 +2022-10-15 20:31:14,968-282-global_steps:18000,grad:4.4677,MSELoss:0.0194,exp_loss:0.6251 +2022-10-15 20:33:27,138-282-global_steps:19000,grad:4.4642,MSELoss:0.0194,exp_loss:0.6229 +2022-10-15 
20:35:39,547-282-global_steps:20000,grad:4.4704,MSELoss:0.0193,exp_loss:0.6212 +2022-10-15 20:37:48,955-282-global_steps:21000,grad:4.4751,MSELoss:0.0193,exp_loss:0.6205 +2022-10-15 20:40:02,301-282-global_steps:22000,grad:4.4865,MSELoss:0.0193,exp_loss:0.6207 +2022-10-15 20:42:15,494-282-global_steps:23000,grad:4.4948,MSELoss:0.0192,exp_loss:0.6202 +2022-10-15 20:43:18,737-344-epoch:2 +2022-10-15 20:44:29,033-282-global_steps:24000,grad:4.6305,MSELoss:0.0187,exp_loss:0.6049 +2022-10-15 20:46:43,712-282-global_steps:25000,grad:4.6002,MSELoss:0.0186,exp_loss:0.6004 +2022-10-15 20:48:55,344-282-global_steps:26000,grad:4.5613,MSELoss:0.0186,exp_loss:0.5997 +2022-10-15 20:51:07,613-282-global_steps:27000,grad:4.5316,MSELoss:0.0186,exp_loss:0.6011 +2022-10-15 20:53:09,198-282-global_steps:28000,grad:4.5381,MSELoss:0.0187,exp_loss:0.6022 +2022-10-15 20:55:15,393-282-global_steps:29000,grad:4.5524,MSELoss:0.0187,exp_loss:0.6016 +2022-10-15 20:57:18,444-282-global_steps:30000,grad:4.5314,MSELoss:0.0186,exp_loss:0.5998 +2022-10-15 20:59:14,910-282-global_steps:31000,grad:4.5522,MSELoss:0.0186,exp_loss:0.6007 +2022-10-15 21:01:10,850-282-global_steps:32000,grad:4.5805,MSELoss:0.0186,exp_loss:0.6018 +2022-10-15 21:03:01,501-282-global_steps:33000,grad:4.5810,MSELoss:0.0186,exp_loss:0.6007 +2022-10-15 21:05:05,649-282-global_steps:34000,grad:4.5896,MSELoss:0.0186,exp_loss:0.5999 +2022-10-15 21:07:02,654-282-global_steps:35000,grad:4.5904,MSELoss:0.0186,exp_loss:0.6005 +2022-10-15 21:07:27,333-344-epoch:3 +2022-10-15 21:09:08,460-282-global_steps:36000,grad:4.7495,MSELoss:0.0182,exp_loss:0.5863 +2022-10-15 21:11:12,614-282-global_steps:37000,grad:4.8080,MSELoss:0.0183,exp_loss:0.5825 +2022-10-15 21:13:19,193-282-global_steps:38000,grad:4.8716,MSELoss:0.0183,exp_loss:0.5857 +2022-10-15 21:15:28,511-282-global_steps:39000,grad:4.8796,MSELoss:0.0182,exp_loss:0.5865 +2022-10-15 21:17:30,254-282-global_steps:40000,grad:4.9164,MSELoss:0.0182,exp_loss:0.5874 +2022-10-15 21:19:41,926-282-global_steps:41000,grad:4.8752,MSELoss:0.0181,exp_loss:0.5866 +2022-10-15 21:21:55,136-282-global_steps:42000,grad:4.8701,MSELoss:0.0181,exp_loss:0.5860 +2022-10-15 21:24:00,435-282-global_steps:43000,grad:4.8792,MSELoss:0.0182,exp_loss:0.5862 +2022-10-15 21:26:09,422-282-global_steps:44000,grad:4.8821,MSELoss:0.0181,exp_loss:0.5869 +2022-10-15 21:28:19,493-282-global_steps:45000,grad:4.8774,MSELoss:0.0181,exp_loss:0.5867 +2022-10-15 21:30:26,560-282-global_steps:46000,grad:4.9022,MSELoss:0.0181,exp_loss:0.5878 +2022-10-15 21:32:16,766-344-epoch:4 +2022-10-15 21:32:22,205-282-global_steps:47000,grad:4.5705,MSELoss:0.0182,exp_loss:0.5518 +2022-10-15 21:34:27,569-282-global_steps:48000,grad:4.9349,MSELoss:0.0180,exp_loss:0.5747 +2022-10-15 21:36:40,633-282-global_steps:49000,grad:5.1179,MSELoss:0.0180,exp_loss:0.5717 +2022-10-15 21:38:50,146-282-global_steps:50000,grad:5.2077,MSELoss:0.0180,exp_loss:0.5739 +2022-10-15 21:40:58,161-282-global_steps:51000,grad:5.2328,MSELoss:0.0179,exp_loss:0.5737 +2022-10-15 21:43:10,095-282-global_steps:52000,grad:5.3003,MSELoss:0.0179,exp_loss:0.5752 +2022-10-15 21:45:19,800-282-global_steps:53000,grad:5.3047,MSELoss:0.0179,exp_loss:0.5754 +2022-10-15 21:47:34,250-282-global_steps:54000,grad:5.3156,MSELoss:0.0178,exp_loss:0.5740 +2022-10-15 21:49:34,159-282-global_steps:55000,grad:5.3502,MSELoss:0.0179,exp_loss:0.5759 +2022-10-15 21:51:44,179-282-global_steps:56000,grad:5.3293,MSELoss:0.0179,exp_loss:0.5751 +2022-10-15 
21:53:50,920-282-global_steps:57000,grad:5.3649,MSELoss:0.0178,exp_loss:0.5755 +2022-10-15 21:56:01,226-282-global_steps:58000,grad:5.3593,MSELoss:0.0179,exp_loss:0.5758 +2022-10-15 21:57:11,483-344-epoch:5 +2022-10-15 21:57:38,667-282-global_steps:59000,grad:4.8690,MSELoss:0.0176,exp_loss:0.5541 +2022-10-15 21:59:04,734-282-global_steps:60000,grad:5.4959,MSELoss:0.0179,exp_loss:0.5678 +2022-10-15 22:00:53,765-282-global_steps:61000,grad:5.5457,MSELoss:0.0178,exp_loss:0.5664 +2022-10-15 22:03:05,905-282-global_steps:62000,grad:5.5827,MSELoss:0.0177,exp_loss:0.5656 +2022-10-15 22:05:10,987-282-global_steps:63000,grad:5.5513,MSELoss:0.0177,exp_loss:0.5642 +2022-10-15 22:06:56,834-282-global_steps:64000,grad:5.6129,MSELoss:0.0177,exp_loss:0.5645 +2022-10-15 22:08:42,101-282-global_steps:65000,grad:5.6841,MSELoss:0.0177,exp_loss:0.5648 +2022-10-15 22:10:39,931-282-global_steps:66000,grad:5.6780,MSELoss:0.0177,exp_loss:0.5646 +2022-10-15 22:12:36,050-282-global_steps:67000,grad:5.6875,MSELoss:0.0177,exp_loss:0.5648 +2022-10-15 22:14:41,466-282-global_steps:68000,grad:5.7146,MSELoss:0.0177,exp_loss:0.5656 +2022-10-15 22:16:49,737-282-global_steps:69000,grad:5.7726,MSELoss:0.0177,exp_loss:0.5661 +2022-10-15 22:18:53,570-282-global_steps:70000,grad:5.7716,MSELoss:0.0177,exp_loss:0.5661 +2022-10-15 22:19:48,175-344-epoch:6 +2022-10-15 22:21:02,275-282-global_steps:71000,grad:5.9961,MSELoss:0.0178,exp_loss:0.5628 +2022-10-15 22:23:22,199-282-global_steps:72000,grad:6.1042,MSELoss:0.0178,exp_loss:0.5581 +2022-10-15 22:25:33,244-282-global_steps:73000,grad:6.1626,MSELoss:0.0177,exp_loss:0.5556 +2022-10-15 22:27:40,736-282-global_steps:74000,grad:6.2342,MSELoss:0.0176,exp_loss:0.5565 +2022-10-15 22:30:00,436-282-global_steps:75000,grad:6.2209,MSELoss:0.0176,exp_loss:0.5575 +2022-10-15 22:32:12,180-282-global_steps:76000,grad:6.2735,MSELoss:0.0175,exp_loss:0.5570 +2022-10-15 22:34:30,402-282-global_steps:77000,grad:6.2964,MSELoss:0.0175,exp_loss:0.5561 +2022-10-15 22:36:46,994-282-global_steps:78000,grad:6.3178,MSELoss:0.0176,exp_loss:0.5557 +2022-10-15 22:39:03,982-282-global_steps:79000,grad:6.3380,MSELoss:0.0176,exp_loss:0.5553 +2022-10-15 22:41:18,606-282-global_steps:80000,grad:6.3678,MSELoss:0.0175,exp_loss:0.5549 +2022-10-15 22:43:29,729-282-global_steps:81000,grad:6.3667,MSELoss:0.0175,exp_loss:0.5542 +2022-10-15 22:45:41,261-282-global_steps:82000,grad:6.3571,MSELoss:0.0175,exp_loss:0.5538 +2022-10-15 22:45:59,713-344-epoch:7 +2022-10-15 22:47:54,559-282-global_steps:83000,grad:6.4125,MSELoss:0.0174,exp_loss:0.5516 +2022-10-15 22:50:11,640-282-global_steps:84000,grad:6.6096,MSELoss:0.0174,exp_loss:0.5487 +2022-10-15 22:52:27,336-282-global_steps:85000,grad:6.4989,MSELoss:0.0172,exp_loss:0.5424 +2022-10-15 22:54:40,339-282-global_steps:86000,grad:6.5516,MSELoss:0.0173,exp_loss:0.5430 +2022-10-15 22:56:49,317-282-global_steps:87000,grad:6.5847,MSELoss:0.0173,exp_loss:0.5430 +2022-10-15 22:59:03,358-282-global_steps:88000,grad:6.6799,MSELoss:0.0173,exp_loss:0.5431 +2022-10-15 23:01:10,407-282-global_steps:89000,grad:6.7475,MSELoss:0.0173,exp_loss:0.5430 +2022-10-15 23:03:22,335-282-global_steps:90000,grad:6.7528,MSELoss:0.0173,exp_loss:0.5431 +2022-10-15 23:05:31,388-282-global_steps:91000,grad:6.7619,MSELoss:0.0173,exp_loss:0.5427 +2022-10-15 23:07:41,063-282-global_steps:92000,grad:6.7673,MSELoss:0.0173,exp_loss:0.5425 +2022-10-15 23:09:52,147-282-global_steps:93000,grad:6.8218,MSELoss:0.0173,exp_loss:0.5430 +2022-10-15 23:11:52,110-344-epoch:8 +2022-10-15 
23:12:05,682-282-global_steps:94000,grad:6.5304,MSELoss:0.0174,exp_loss:0.5514 +2022-10-15 23:14:15,959-282-global_steps:95000,grad:6.5703,MSELoss:0.0171,exp_loss:0.5190 +2022-10-15 23:16:25,895-282-global_steps:96000,grad:6.8564,MSELoss:0.0171,exp_loss:0.5229 +2022-10-15 23:18:36,829-282-global_steps:97000,grad:6.9823,MSELoss:0.0171,exp_loss:0.5270 +2022-10-15 23:20:48,878-282-global_steps:98000,grad:7.0438,MSELoss:0.0172,exp_loss:0.5298 +2022-10-15 23:22:49,381-282-global_steps:99000,grad:7.1094,MSELoss:0.0171,exp_loss:0.5302 +2022-10-15 23:24:44,395-282-global_steps:100000,grad:7.1833,MSELoss:0.0172,exp_loss:0.5287 +2022-10-15 23:26:39,398-282-global_steps:101000,grad:7.1633,MSELoss:0.0172,exp_loss:0.5302 +2022-10-15 23:28:30,356-282-global_steps:102000,grad:7.1728,MSELoss:0.0172,exp_loss:0.5296 +2022-10-15 23:30:33,366-282-global_steps:103000,grad:7.1950,MSELoss:0.0171,exp_loss:0.5292 +2022-10-15 23:32:23,953-282-global_steps:104000,grad:7.2331,MSELoss:0.0171,exp_loss:0.5296 +2022-10-15 23:34:34,093-282-global_steps:105000,grad:7.2892,MSELoss:0.0172,exp_loss:0.5299 +2022-10-15 23:35:46,615-344-epoch:9 +2022-10-15 23:36:24,001-282-global_steps:106000,grad:7.6140,MSELoss:0.0173,exp_loss:0.5237 +2022-10-15 23:38:07,904-282-global_steps:107000,grad:7.3770,MSELoss:0.0171,exp_loss:0.5159 +2022-10-15 23:40:04,613-282-global_steps:108000,grad:7.6165,MSELoss:0.0170,exp_loss:0.5156 +2022-10-15 23:42:16,364-282-global_steps:109000,grad:7.6985,MSELoss:0.0170,exp_loss:0.5151 +2022-10-15 23:44:24,304-282-global_steps:110000,grad:7.7379,MSELoss:0.0171,exp_loss:0.5180 +2022-10-15 23:46:31,142-282-global_steps:111000,grad:7.7788,MSELoss:0.0171,exp_loss:0.5180 +2022-10-15 23:48:43,048-282-global_steps:112000,grad:7.8129,MSELoss:0.0171,exp_loss:0.5179 +2022-10-15 23:50:51,875-282-global_steps:113000,grad:7.8385,MSELoss:0.0171,exp_loss:0.5179 +2022-10-15 23:53:03,835-282-global_steps:114000,grad:7.8572,MSELoss:0.0171,exp_loss:0.5173 +2022-10-15 23:55:01,885-282-global_steps:115000,grad:7.9019,MSELoss:0.0171,exp_loss:0.5182 +2022-10-15 23:57:10,663-282-global_steps:116000,grad:7.9209,MSELoss:0.0170,exp_loss:0.5186 +2022-10-15 23:59:23,548-282-global_steps:117000,grad:7.9459,MSELoss:0.0170,exp_loss:0.5186 +2022-10-16 00:00:13,051-344-epoch:10 +2022-10-16 00:01:23,290-282-global_steps:118000,grad:7.9721,MSELoss:0.0168,exp_loss:0.5086 +2022-10-16 00:03:17,106-282-global_steps:119000,grad:7.9133,MSELoss:0.0169,exp_loss:0.5012 +2022-10-16 00:05:11,372-282-global_steps:120000,grad:7.9979,MSELoss:0.0170,exp_loss:0.5037 +2022-10-16 00:07:28,423-282-global_steps:121000,grad:8.0736,MSELoss:0.0169,exp_loss:0.5037 +2022-10-16 00:09:37,896-282-global_steps:122000,grad:8.1468,MSELoss:0.0168,exp_loss:0.5029 +2022-10-16 00:11:46,060-282-global_steps:123000,grad:8.1783,MSELoss:0.0168,exp_loss:0.5040 +2022-10-16 00:13:48,333-282-global_steps:124000,grad:8.1583,MSELoss:0.0168,exp_loss:0.5049 +2022-10-16 00:15:56,155-282-global_steps:125000,grad:8.2404,MSELoss:0.0168,exp_loss:0.5055 +2022-10-16 00:18:07,137-282-global_steps:126000,grad:8.2741,MSELoss:0.0168,exp_loss:0.5053 +2022-10-16 00:20:16,707-282-global_steps:127000,grad:8.3121,MSELoss:0.0168,exp_loss:0.5062 +2022-10-16 00:22:27,226-282-global_steps:128000,grad:8.3259,MSELoss:0.0169,exp_loss:0.5069 +2022-10-16 00:24:37,139-282-global_steps:129000,grad:8.3678,MSELoss:0.0169,exp_loss:0.5065 +2022-10-16 00:24:51,257-344-epoch:11 +2022-10-16 00:26:46,020-282-global_steps:130000,grad:8.3121,MSELoss:0.0166,exp_loss:0.5002 +2022-10-16 
00:28:49,823-282-global_steps:131000,grad:8.5299,MSELoss:0.0168,exp_loss:0.4936 +2022-10-16 00:31:00,973-282-global_steps:132000,grad:8.6632,MSELoss:0.0168,exp_loss:0.4941 +2022-10-16 00:33:12,021-282-global_steps:133000,grad:8.5837,MSELoss:0.0168,exp_loss:0.4942 +2022-10-16 00:35:18,410-282-global_steps:134000,grad:8.6897,MSELoss:0.0168,exp_loss:0.4939 +2022-10-16 00:37:26,939-282-global_steps:135000,grad:8.7060,MSELoss:0.0168,exp_loss:0.4936 +2022-10-16 00:39:37,177-282-global_steps:136000,grad:8.7794,MSELoss:0.0168,exp_loss:0.4944 +2022-10-16 00:41:52,395-282-global_steps:137000,grad:8.8019,MSELoss:0.0168,exp_loss:0.4940 +2022-10-16 00:44:05,972-282-global_steps:138000,grad:8.8174,MSELoss:0.0167,exp_loss:0.4936 +2022-10-16 00:46:24,515-282-global_steps:139000,grad:8.8732,MSELoss:0.0167,exp_loss:0.4946 +2022-10-16 00:48:41,268-282-global_steps:140000,grad:8.8858,MSELoss:0.0167,exp_loss:0.4946 +2022-10-16 00:50:36,004-344-epoch:12 +2022-10-16 00:50:57,404-282-global_steps:141000,grad:8.8348,MSELoss:0.0166,exp_loss:0.4898 +2022-10-16 00:53:04,540-282-global_steps:142000,grad:8.6239,MSELoss:0.0168,exp_loss:0.4876 +2022-10-16 00:55:14,707-282-global_steps:143000,grad:8.7451,MSELoss:0.0166,exp_loss:0.4806 +2022-10-16 00:57:26,584-282-global_steps:144000,grad:8.8957,MSELoss:0.0165,exp_loss:0.4815 +2022-10-16 00:59:33,551-282-global_steps:145000,grad:8.8480,MSELoss:0.0166,exp_loss:0.4823 +2022-10-16 01:01:45,479-282-global_steps:146000,grad:8.9106,MSELoss:0.0166,exp_loss:0.4823 +2022-10-16 01:03:59,883-282-global_steps:147000,grad:8.9418,MSELoss:0.0166,exp_loss:0.4813 +2022-10-16 01:06:09,487-282-global_steps:148000,grad:8.9506,MSELoss:0.0165,exp_loss:0.4809 +2022-10-16 01:08:12,521-282-global_steps:149000,grad:8.9997,MSELoss:0.0166,exp_loss:0.4819 +2022-10-16 01:10:19,353-282-global_steps:150000,grad:9.0030,MSELoss:0.0165,exp_loss:0.4811 +2022-10-16 01:12:32,755-282-global_steps:151000,grad:9.0372,MSELoss:0.0165,exp_loss:0.4812 +2022-10-16 01:14:46,696-282-global_steps:152000,grad:9.0617,MSELoss:0.0165,exp_loss:0.4819 +2022-10-16 01:16:02,776-344-epoch:13 +2022-10-16 01:16:56,956-282-global_steps:153000,grad:8.6090,MSELoss:0.0160,exp_loss:0.4653 +2022-10-16 01:19:10,864-282-global_steps:154000,grad:8.9259,MSELoss:0.0163,exp_loss:0.4689 +2022-10-16 01:21:21,699-282-global_steps:155000,grad:9.0368,MSELoss:0.0163,exp_loss:0.4695 +2022-10-16 01:23:33,751-282-global_steps:156000,grad:9.1259,MSELoss:0.0163,exp_loss:0.4718 +2022-10-16 01:25:43,118-282-global_steps:157000,grad:9.0642,MSELoss:0.0163,exp_loss:0.4689 +2022-10-16 01:27:56,515-282-global_steps:158000,grad:9.1278,MSELoss:0.0163,exp_loss:0.4690 +2022-10-16 01:30:05,998-282-global_steps:159000,grad:9.1931,MSELoss:0.0163,exp_loss:0.4694 +2022-10-16 01:32:18,197-282-global_steps:160000,grad:9.2730,MSELoss:0.0163,exp_loss:0.4693 +2022-10-16 01:34:32,711-282-global_steps:161000,grad:9.2841,MSELoss:0.0163,exp_loss:0.4696 +2022-10-16 01:36:45,642-282-global_steps:162000,grad:9.3032,MSELoss:0.0163,exp_loss:0.4698 +2022-10-16 01:38:43,895-282-global_steps:163000,grad:9.3544,MSELoss:0.0163,exp_loss:0.4707 +2022-10-16 01:40:49,322-282-global_steps:164000,grad:9.3673,MSELoss:0.0163,exp_loss:0.4706 +2022-10-16 01:41:27,920-344-epoch:14 +2022-10-16 01:42:58,250-282-global_steps:165000,grad:9.5371,MSELoss:0.0161,exp_loss:0.4579 +2022-10-16 01:45:11,920-282-global_steps:166000,grad:9.3571,MSELoss:0.0161,exp_loss:0.4579 +2022-10-16 01:47:24,111-282-global_steps:167000,grad:9.4045,MSELoss:0.0160,exp_loss:0.4553 +2022-10-16 
01:49:31,880-282-global_steps:168000,grad:9.4722,MSELoss:0.0161,exp_loss:0.4565 +2022-10-16 01:51:44,798-282-global_steps:169000,grad:9.4884,MSELoss:0.0161,exp_loss:0.4576 +2022-10-16 01:53:57,994-282-global_steps:170000,grad:9.5003,MSELoss:0.0161,exp_loss:0.4578 +2022-10-16 01:56:04,284-282-global_steps:171000,grad:9.5392,MSELoss:0.0161,exp_loss:0.4592 +2022-10-16 01:58:09,184-282-global_steps:172000,grad:9.5425,MSELoss:0.0161,exp_loss:0.4590 +2022-10-16 02:00:18,217-282-global_steps:173000,grad:9.5031,MSELoss:0.0161,exp_loss:0.4583 +2022-10-16 02:02:09,982-282-global_steps:174000,grad:9.5616,MSELoss:0.0162,exp_loss:0.4595 +2022-10-16 02:04:07,481-282-global_steps:175000,grad:9.5957,MSELoss:0.0161,exp_loss:0.4594 +2022-10-16 02:06:10,221-282-global_steps:176000,grad:9.5811,MSELoss:0.0161,exp_loss:0.4599 +2022-10-16 02:06:17,381-344-epoch:15 +2022-10-16 02:08:01,940-282-global_steps:177000,grad:9.0775,MSELoss:0.0158,exp_loss:0.4424 +2022-10-16 02:09:57,621-282-global_steps:178000,grad:9.2714,MSELoss:0.0159,exp_loss:0.4455 +2022-10-16 02:12:05,144-282-global_steps:179000,grad:9.3635,MSELoss:0.0160,exp_loss:0.4469 +2022-10-16 02:14:09,222-282-global_steps:180000,grad:9.3904,MSELoss:0.0160,exp_loss:0.4476 +2022-10-16 02:16:20,843-282-global_steps:181000,grad:9.4638,MSELoss:0.0160,exp_loss:0.4482 +2022-10-16 02:18:33,259-282-global_steps:182000,grad:9.5187,MSELoss:0.0160,exp_loss:0.4478 +2022-10-16 02:20:46,684-282-global_steps:183000,grad:9.4880,MSELoss:0.0159,exp_loss:0.4484 +2022-10-16 02:22:56,366-282-global_steps:184000,grad:9.5265,MSELoss:0.0159,exp_loss:0.4488 +2022-10-16 02:24:57,801-282-global_steps:185000,grad:9.5458,MSELoss:0.0159,exp_loss:0.4487 +2022-10-16 02:26:56,670-282-global_steps:186000,grad:9.5633,MSELoss:0.0159,exp_loss:0.4497 +2022-10-16 02:29:04,826-282-global_steps:187000,grad:9.5718,MSELoss:0.0159,exp_loss:0.4491 +2022-10-16 02:30:41,773-344-epoch:16 +2022-10-16 02:31:06,263-282-global_steps:188000,grad:9.2658,MSELoss:0.0159,exp_loss:0.4400 +2022-10-16 02:33:13,666-282-global_steps:189000,grad:9.3744,MSELoss:0.0157,exp_loss:0.4358 +2022-10-16 02:35:22,286-282-global_steps:190000,grad:9.4029,MSELoss:0.0157,exp_loss:0.4347 +2022-10-16 02:37:32,049-282-global_steps:191000,grad:9.4270,MSELoss:0.0157,exp_loss:0.4349 +2022-10-16 02:39:42,468-282-global_steps:192000,grad:9.5346,MSELoss:0.0157,exp_loss:0.4372 +2022-10-16 02:41:51,803-282-global_steps:193000,grad:9.5478,MSELoss:0.0157,exp_loss:0.4382 +2022-10-16 02:43:59,453-282-global_steps:194000,grad:9.5566,MSELoss:0.0157,exp_loss:0.4376 +2022-10-16 02:46:05,629-282-global_steps:195000,grad:9.5263,MSELoss:0.0157,exp_loss:0.4383 +2022-10-16 02:48:09,761-282-global_steps:196000,grad:9.5843,MSELoss:0.0157,exp_loss:0.4392 +2022-10-16 02:50:15,047-282-global_steps:197000,grad:9.6210,MSELoss:0.0158,exp_loss:0.4394 +2022-10-16 02:52:23,157-282-global_steps:198000,grad:9.6614,MSELoss:0.0158,exp_loss:0.4395 +2022-10-16 02:54:30,026-282-global_steps:199000,grad:9.7043,MSELoss:0.0158,exp_loss:0.4401 +2022-10-16 02:55:37,270-344-epoch:17 +2022-10-16 02:56:39,864-282-global_steps:200000,grad:9.6032,MSELoss:0.0154,exp_loss:0.4291 +2022-10-16 02:58:52,238-282-global_steps:201000,grad:9.3950,MSELoss:0.0154,exp_loss:0.4285 +2022-10-16 03:01:02,309-282-global_steps:202000,grad:9.5307,MSELoss:0.0154,exp_loss:0.4279 +2022-10-16 03:03:11,260-282-global_steps:203000,grad:9.5482,MSELoss:0.0155,exp_loss:0.4304 +2022-10-16 03:05:20,150-282-global_steps:204000,grad:9.6168,MSELoss:0.0155,exp_loss:0.4316 +2022-10-16 
03:07:30,493-282-global_steps:205000,grad:9.6689,MSELoss:0.0155,exp_loss:0.4294 +2022-10-16 03:09:40,403-282-global_steps:206000,grad:9.7298,MSELoss:0.0155,exp_loss:0.4303 +2022-10-16 03:11:43,582-282-global_steps:207000,grad:9.7264,MSELoss:0.0155,exp_loss:0.4295 +2022-10-16 03:13:54,497-282-global_steps:208000,grad:9.7376,MSELoss:0.0155,exp_loss:0.4292 +2022-10-16 03:16:08,479-282-global_steps:209000,grad:9.7644,MSELoss:0.0155,exp_loss:0.4296 +2022-10-16 03:18:23,869-282-global_steps:210000,grad:9.7969,MSELoss:0.0156,exp_loss:0.4301 +2022-10-16 03:20:36,312-282-global_steps:211000,grad:9.8307,MSELoss:0.0156,exp_loss:0.4304 +2022-10-16 03:21:13,186-344-epoch:18 +2022-10-16 03:22:49,076-282-global_steps:212000,grad:9.5336,MSELoss:0.0156,exp_loss:0.4252 +2022-10-16 03:25:04,900-282-global_steps:213000,grad:9.4698,MSELoss:0.0154,exp_loss:0.4190 +2022-10-16 03:27:16,277-282-global_steps:214000,grad:9.4965,MSELoss:0.0155,exp_loss:0.4229 +2022-10-16 03:29:33,226-282-global_steps:215000,grad:9.5831,MSELoss:0.0154,exp_loss:0.4206 +2022-10-16 03:31:51,362-282-global_steps:216000,grad:9.7296,MSELoss:0.0154,exp_loss:0.4209 +2022-10-16 03:34:05,671-282-global_steps:217000,grad:9.7594,MSELoss:0.0154,exp_loss:0.4210 +2022-10-16 03:36:18,988-282-global_steps:218000,grad:9.7491,MSELoss:0.0154,exp_loss:0.4220 +2022-10-16 03:38:23,614-282-global_steps:219000,grad:9.7642,MSELoss:0.0154,exp_loss:0.4206 +2022-10-16 03:40:27,580-282-global_steps:220000,grad:9.8443,MSELoss:0.0154,exp_loss:0.4209 +2022-10-16 03:42:30,710-282-global_steps:221000,grad:9.9090,MSELoss:0.0154,exp_loss:0.4215 +2022-10-16 03:44:43,305-282-global_steps:222000,grad:9.9524,MSELoss:0.0154,exp_loss:0.4217 +2022-10-16 03:46:56,894-282-global_steps:223000,grad:9.9490,MSELoss:0.0154,exp_loss:0.4219 +2022-10-16 03:46:57,247-344-epoch:19 +2022-10-16 03:49:02,738-282-global_steps:224000,grad:9.5708,MSELoss:0.0152,exp_loss:0.4136 +2022-10-16 03:51:03,152-282-global_steps:225000,grad:9.6187,MSELoss:0.0151,exp_loss:0.4097 +2022-10-16 03:53:07,221-282-global_steps:226000,grad:9.5846,MSELoss:0.0151,exp_loss:0.4092 +2022-10-16 03:55:19,992-282-global_steps:227000,grad:9.7132,MSELoss:0.0152,exp_loss:0.4117 +2022-10-16 03:57:27,886-282-global_steps:228000,grad:9.7612,MSELoss:0.0151,exp_loss:0.4112 +2022-10-16 03:59:27,899-282-global_steps:229000,grad:9.7986,MSELoss:0.0152,exp_loss:0.4130 +2022-10-16 04:01:29,259-282-global_steps:230000,grad:9.7960,MSELoss:0.0152,exp_loss:0.4131 +2022-10-16 04:03:22,467-282-global_steps:231000,grad:9.8412,MSELoss:0.0152,exp_loss:0.4126 +2022-10-16 04:05:31,492-282-global_steps:232000,grad:9.8585,MSELoss:0.0152,exp_loss:0.4127 +2022-10-16 04:07:43,919-282-global_steps:233000,grad:9.8855,MSELoss:0.0152,exp_loss:0.4130 +2022-10-16 04:09:56,115-282-global_steps:234000,grad:9.9112,MSELoss:0.0152,exp_loss:0.4139 +2022-10-16 04:11:26,069-344-epoch:20 +2022-10-16 04:12:00,351-282-global_steps:235000,grad:9.9009,MSELoss:0.0147,exp_loss:0.4075 +2022-10-16 04:14:08,091-282-global_steps:236000,grad:9.6670,MSELoss:0.0150,exp_loss:0.4017 +2022-10-16 04:16:16,036-282-global_steps:237000,grad:9.5981,MSELoss:0.0150,exp_loss:0.4020 +2022-10-16 04:18:08,724-282-global_steps:238000,grad:9.6779,MSELoss:0.0151,exp_loss:0.4061 +2022-10-16 04:20:05,042-282-global_steps:239000,grad:9.7113,MSELoss:0.0151,exp_loss:0.4060 +2022-10-16 04:22:17,732-282-global_steps:240000,grad:9.6576,MSELoss:0.0151,exp_loss:0.4059 +2022-10-16 04:24:17,057-282-global_steps:241000,grad:9.6705,MSELoss:0.0151,exp_loss:0.4062 +2022-10-16 
04:26:31,764-282-global_steps:242000,grad:9.6956,MSELoss:0.0151,exp_loss:0.4054 +2022-10-16 04:28:44,448-282-global_steps:243000,grad:9.7968,MSELoss:0.0151,exp_loss:0.4046 +2022-10-16 04:30:58,475-282-global_steps:244000,grad:9.8324,MSELoss:0.0151,exp_loss:0.4042 +2022-10-16 04:33:02,532-282-global_steps:245000,grad:9.9028,MSELoss:0.0151,exp_loss:0.4051 +2022-10-16 04:35:00,529-282-global_steps:246000,grad:9.9018,MSELoss:0.0151,exp_loss:0.4047 +2022-10-16 04:35:51,441-344-epoch:21 +2022-10-16 04:36:51,902-282-global_steps:247000,grad:9.3193,MSELoss:0.0146,exp_loss:0.3818 +2022-10-16 04:39:03,316-282-global_steps:248000,grad:9.4944,MSELoss:0.0147,exp_loss:0.3854 +2022-10-16 04:41:11,404-282-global_steps:249000,grad:9.6336,MSELoss:0.0148,exp_loss:0.3927 +2022-10-16 04:43:22,639-282-global_steps:250000,grad:9.6977,MSELoss:0.0148,exp_loss:0.3927 +2022-10-16 04:45:23,247-282-global_steps:251000,grad:9.6873,MSELoss:0.0148,exp_loss:0.3933 +2022-10-16 04:47:36,114-282-global_steps:252000,grad:9.7574,MSELoss:0.0148,exp_loss:0.3939 +2022-10-16 04:49:43,037-282-global_steps:253000,grad:9.7456,MSELoss:0.0148,exp_loss:0.3942 +2022-10-16 04:51:53,601-282-global_steps:254000,grad:9.8368,MSELoss:0.0148,exp_loss:0.3950 +2022-10-16 04:54:02,064-282-global_steps:255000,grad:9.8309,MSELoss:0.0149,exp_loss:0.3959 +2022-10-16 04:56:06,631-282-global_steps:256000,grad:9.8493,MSELoss:0.0149,exp_loss:0.3960 +2022-10-16 04:58:11,548-282-global_steps:257000,grad:9.8730,MSELoss:0.0149,exp_loss:0.3967 +2022-10-16 05:00:21,660-282-global_steps:258000,grad:9.8915,MSELoss:0.0149,exp_loss:0.3969 +2022-10-16 05:00:49,160-344-epoch:22 +2022-10-16 05:02:32,313-282-global_steps:259000,grad:9.4638,MSELoss:0.0145,exp_loss:0.3834 +2022-10-16 05:04:28,322-282-global_steps:260000,grad:9.6873,MSELoss:0.0147,exp_loss:0.3880 +2022-10-16 05:06:29,368-282-global_steps:261000,grad:9.7223,MSELoss:0.0149,exp_loss:0.3893 +2022-10-16 05:08:35,644-282-global_steps:262000,grad:9.7410,MSELoss:0.0148,exp_loss:0.3903 +2022-10-16 05:10:33,704-282-global_steps:263000,grad:9.7462,MSELoss:0.0148,exp_loss:0.3904 +2022-10-16 05:12:39,827-282-global_steps:264000,grad:9.8024,MSELoss:0.0148,exp_loss:0.3909 +2022-10-16 05:14:50,061-282-global_steps:265000,grad:9.8752,MSELoss:0.0148,exp_loss:0.3903 +2022-10-16 05:17:00,721-282-global_steps:266000,grad:9.8832,MSELoss:0.0148,exp_loss:0.3900 +2022-10-16 05:19:04,685-282-global_steps:267000,grad:9.8880,MSELoss:0.0148,exp_loss:0.3901 +2022-10-16 05:21:08,796-282-global_steps:268000,grad:9.8797,MSELoss:0.0148,exp_loss:0.3896 +2022-10-16 05:23:08,801-282-global_steps:269000,grad:9.8859,MSELoss:0.0147,exp_loss:0.3895 +2022-10-16 05:25:09,476-344-epoch:23 +2022-10-16 05:25:15,805-282-global_steps:270000,grad:10.3298,MSELoss:0.0142,exp_loss:0.3657 +2022-10-16 05:27:22,980-282-global_steps:271000,grad:10.1481,MSELoss:0.0146,exp_loss:0.3845 +2022-10-16 05:29:31,986-282-global_steps:272000,grad:10.0752,MSELoss:0.0146,exp_loss:0.3810 +2022-10-16 05:31:41,458-282-global_steps:273000,grad:10.0073,MSELoss:0.0147,exp_loss:0.3805 +2022-10-16 05:33:43,988-282-global_steps:274000,grad:9.9489,MSELoss:0.0147,exp_loss:0.3823 +2022-10-16 05:35:52,472-282-global_steps:275000,grad:9.9537,MSELoss:0.0147,exp_loss:0.3832 +2022-10-16 05:37:57,029-282-global_steps:276000,grad:9.9112,MSELoss:0.0147,exp_loss:0.3818 +2022-10-16 05:40:02,636-282-global_steps:277000,grad:9.8594,MSELoss:0.0146,exp_loss:0.3815 +2022-10-16 05:42:07,947-282-global_steps:278000,grad:9.8581,MSELoss:0.0146,exp_loss:0.3820 +2022-10-16 
05:44:19,009-282-global_steps:279000,grad:9.8738,MSELoss:0.0146,exp_loss:0.3818 +2022-10-16 05:46:22,444-282-global_steps:280000,grad:9.8507,MSELoss:0.0146,exp_loss:0.3819 +2022-10-16 05:48:34,326-282-global_steps:281000,grad:9.8260,MSELoss:0.0146,exp_loss:0.3819 +2022-10-16 05:49:56,850-344-epoch:24 +2022-10-16 05:50:32,884-282-global_steps:282000,grad:10.1219,MSELoss:0.0149,exp_loss:0.3891 +2022-10-16 05:52:42,045-282-global_steps:283000,grad:9.7724,MSELoss:0.0145,exp_loss:0.3741 +2022-10-16 05:54:55,719-282-global_steps:284000,grad:9.8265,MSELoss:0.0144,exp_loss:0.3740 +2022-10-16 05:57:09,765-282-global_steps:285000,grad:9.6989,MSELoss:0.0144,exp_loss:0.3732 +2022-10-16 05:59:23,210-282-global_steps:286000,grad:9.6985,MSELoss:0.0144,exp_loss:0.3718 +2022-10-16 06:01:30,670-282-global_steps:287000,grad:9.6703,MSELoss:0.0144,exp_loss:0.3728 +2022-10-16 06:03:40,703-282-global_steps:288000,grad:9.7395,MSELoss:0.0144,exp_loss:0.3738 +2022-10-16 06:05:56,235-282-global_steps:289000,grad:9.7933,MSELoss:0.0145,exp_loss:0.3761 +2022-10-16 06:08:07,080-282-global_steps:290000,grad:9.8008,MSELoss:0.0145,exp_loss:0.3756 +2022-10-16 06:10:13,156-282-global_steps:291000,grad:9.8214,MSELoss:0.0145,exp_loss:0.3757 +2022-10-16 06:12:18,614-282-global_steps:292000,grad:9.8102,MSELoss:0.0145,exp_loss:0.3754 +2022-10-16 06:14:30,330-282-global_steps:293000,grad:9.8041,MSELoss:0.0145,exp_loss:0.3751 +2022-10-16 06:15:25,077-344-epoch:25 +2022-10-16 06:16:42,150-282-global_steps:294000,grad:9.7245,MSELoss:0.0145,exp_loss:0.3720 +2022-10-16 06:18:54,942-282-global_steps:295000,grad:9.6394,MSELoss:0.0144,exp_loss:0.3623 +2022-10-16 06:21:03,850-282-global_steps:296000,grad:9.6808,MSELoss:0.0144,exp_loss:0.3683 +2022-10-16 06:23:08,291-282-global_steps:297000,grad:9.5633,MSELoss:0.0144,exp_loss:0.3666 +2022-10-16 06:25:16,443-282-global_steps:298000,grad:9.6460,MSELoss:0.0144,exp_loss:0.3681 +2022-10-16 06:27:19,875-282-global_steps:299000,grad:9.6673,MSELoss:0.0144,exp_loss:0.3684 +2022-10-16 06:29:24,499-282-global_steps:300000,grad:9.7002,MSELoss:0.0144,exp_loss:0.3683 +2022-10-16 06:31:33,375-282-global_steps:301000,grad:9.6751,MSELoss:0.0144,exp_loss:0.3692 +2022-10-16 06:33:42,893-282-global_steps:302000,grad:9.6744,MSELoss:0.0144,exp_loss:0.3689 +2022-10-16 06:35:52,937-282-global_steps:303000,grad:9.6775,MSELoss:0.0144,exp_loss:0.3690 +2022-10-16 06:38:01,831-282-global_steps:304000,grad:9.6891,MSELoss:0.0144,exp_loss:0.3683 +2022-10-16 06:40:06,873-282-global_steps:305000,grad:9.6918,MSELoss:0.0143,exp_loss:0.3682 +2022-10-16 06:40:28,258-344-epoch:26 +2022-10-16 06:42:09,001-282-global_steps:306000,grad:9.6585,MSELoss:0.0139,exp_loss:0.3560 +2022-10-16 06:44:09,022-282-global_steps:307000,grad:9.7695,MSELoss:0.0141,exp_loss:0.3614 +2022-10-16 06:46:02,238-282-global_steps:308000,grad:9.7907,MSELoss:0.0141,exp_loss:0.3621 +2022-10-16 06:47:53,765-282-global_steps:309000,grad:9.7626,MSELoss:0.0142,exp_loss:0.3623 +2022-10-16 06:49:51,397-282-global_steps:310000,grad:9.7727,MSELoss:0.0142,exp_loss:0.3623 +2022-10-16 06:52:04,336-282-global_steps:311000,grad:9.7181,MSELoss:0.0142,exp_loss:0.3612 +2022-10-16 06:54:05,097-282-global_steps:312000,grad:9.7189,MSELoss:0.0143,exp_loss:0.3607 +2022-10-16 06:56:16,769-282-global_steps:313000,grad:9.7389,MSELoss:0.0142,exp_loss:0.3608 +2022-10-16 06:58:25,076-282-global_steps:314000,grad:9.7205,MSELoss:0.0143,exp_loss:0.3608 +2022-10-16 07:00:36,310-282-global_steps:315000,grad:9.7338,MSELoss:0.0143,exp_loss:0.3623 +2022-10-16 
07:02:46,493-282-global_steps:316000,grad:9.7395,MSELoss:0.0142,exp_loss:0.3622 +2022-10-16 07:04:41,758-344-epoch:27 +2022-10-16 07:04:55,269-282-global_steps:317000,grad:10.0111,MSELoss:0.0141,exp_loss:0.3556 +2022-10-16 07:07:04,081-282-global_steps:318000,grad:9.7067,MSELoss:0.0143,exp_loss:0.3521 +2022-10-16 07:09:14,029-282-global_steps:319000,grad:9.6293,MSELoss:0.0141,exp_loss:0.3533 +2022-10-16 07:11:25,023-282-global_steps:320000,grad:9.5949,MSELoss:0.0140,exp_loss:0.3527 +2022-10-16 07:13:42,604-282-global_steps:321000,grad:9.6133,MSELoss:0.0141,exp_loss:0.3529 +2022-10-16 07:15:56,987-282-global_steps:322000,grad:9.6412,MSELoss:0.0141,exp_loss:0.3537 +2022-10-16 07:18:13,626-282-global_steps:323000,grad:9.5927,MSELoss:0.0141,exp_loss:0.3534 +2022-10-16 07:20:21,950-282-global_steps:324000,grad:9.5973,MSELoss:0.0141,exp_loss:0.3541 +2022-10-16 07:22:34,093-282-global_steps:325000,grad:9.6023,MSELoss:0.0141,exp_loss:0.3550 +2022-10-16 07:24:42,967-282-global_steps:326000,grad:9.6112,MSELoss:0.0141,exp_loss:0.3559 +2022-10-16 07:26:55,591-282-global_steps:327000,grad:9.6009,MSELoss:0.0141,exp_loss:0.3553 +2022-10-16 07:29:08,106-282-global_steps:328000,grad:9.6290,MSELoss:0.0141,exp_loss:0.3558 +2022-10-16 07:30:32,315-344-epoch:28 +2022-10-16 07:31:19,738-282-global_steps:329000,grad:9.8296,MSELoss:0.0137,exp_loss:0.3567 +2022-10-16 07:33:35,400-282-global_steps:330000,grad:9.6335,MSELoss:0.0139,exp_loss:0.3496 +2022-10-16 07:35:47,323-282-global_steps:331000,grad:9.5405,MSELoss:0.0140,exp_loss:0.3509 +2022-10-16 07:37:55,236-282-global_steps:332000,grad:9.5419,MSELoss:0.0141,exp_loss:0.3521 +2022-10-16 07:40:04,830-282-global_steps:333000,grad:9.5985,MSELoss:0.0141,exp_loss:0.3522 +2022-10-16 07:42:09,707-282-global_steps:334000,grad:9.6069,MSELoss:0.0141,exp_loss:0.3530 +2022-10-16 07:44:20,628-282-global_steps:335000,grad:9.6141,MSELoss:0.0140,exp_loss:0.3514 +2022-10-16 07:46:28,559-282-global_steps:336000,grad:9.5932,MSELoss:0.0140,exp_loss:0.3506 +2022-10-16 07:48:37,718-282-global_steps:337000,grad:9.5965,MSELoss:0.0140,exp_loss:0.3505 +2022-10-16 07:50:43,313-282-global_steps:338000,grad:9.5959,MSELoss:0.0140,exp_loss:0.3503 +2022-10-16 07:52:38,735-282-global_steps:339000,grad:9.5890,MSELoss:0.0140,exp_loss:0.3503 +2022-10-16 07:54:51,962-282-global_steps:340000,grad:9.6047,MSELoss:0.0140,exp_loss:0.3506 +2022-10-16 07:55:41,523-344-epoch:29 +2022-10-16 07:57:04,152-282-global_steps:341000,grad:10.0207,MSELoss:0.0139,exp_loss:0.3495 +2022-10-16 07:59:19,549-282-global_steps:342000,grad:9.6138,MSELoss:0.0138,exp_loss:0.3421 +2022-10-16 08:01:35,263-282-global_steps:343000,grad:9.5889,MSELoss:0.0138,exp_loss:0.3445 +2022-10-16 08:03:51,410-282-global_steps:344000,grad:9.5032,MSELoss:0.0139,exp_loss:0.3456 +2022-10-16 08:06:03,258-282-global_steps:345000,grad:9.4825,MSELoss:0.0139,exp_loss:0.3445 +2022-10-16 08:08:17,390-282-global_steps:346000,grad:9.4809,MSELoss:0.0139,exp_loss:0.3450 +2022-10-16 08:10:32,220-282-global_steps:347000,grad:9.4109,MSELoss:0.0139,exp_loss:0.3438 +2022-10-16 08:12:45,898-282-global_steps:348000,grad:9.4212,MSELoss:0.0139,exp_loss:0.3445 +2022-10-16 08:14:57,444-282-global_steps:349000,grad:9.4180,MSELoss:0.0139,exp_loss:0.3451 +2022-10-16 08:17:10,386-282-global_steps:350000,grad:9.4510,MSELoss:0.0139,exp_loss:0.3450 +2022-10-16 08:19:22,777-282-global_steps:351000,grad:9.4818,MSELoss:0.0139,exp_loss:0.3462 +2022-10-16 08:21:34,881-282-global_steps:352000,grad:9.4865,MSELoss:0.0139,exp_loss:0.3457 +2022-10-16 08:21:50,930-344-epoch:30 
[training log continues, one entry per 1,000 global steps with epoch markers 31 through 96 (2022-10-16 08:23 to 2022-10-17 11:35): global_steps 353,000 to 1,128,000; grad decreases from about 9.37 to 7.23, MSELoss from 0.0138 to 0.0108, and exp_loss from 0.3404 to 0.1910]
+2022-10-17 
11:37:13,707-282-global_steps:1129000,grad:7.1804,MSELoss:0.0108,exp_loss:0.1919 +2022-10-17 11:39:22,872-282-global_steps:1130000,grad:7.1750,MSELoss:0.0108,exp_loss:0.1929 +2022-10-17 11:41:33,168-282-global_steps:1131000,grad:7.1714,MSELoss:0.0108,exp_loss:0.1932 +2022-10-17 11:43:38,785-282-global_steps:1132000,grad:7.1386,MSELoss:0.0108,exp_loss:0.1928 +2022-10-17 11:45:46,198-282-global_steps:1133000,grad:7.1333,MSELoss:0.0108,exp_loss:0.1929 +2022-10-17 11:47:40,368-282-global_steps:1134000,grad:7.1295,MSELoss:0.0108,exp_loss:0.1933 +2022-10-17 11:49:21,235-282-global_steps:1135000,grad:7.1174,MSELoss:0.0108,exp_loss:0.1938 +2022-10-17 11:51:08,813-282-global_steps:1136000,grad:7.0774,MSELoss:0.0108,exp_loss:0.1933 +2022-10-17 11:52:53,484-282-global_steps:1137000,grad:7.0775,MSELoss:0.0108,exp_loss:0.1933 +2022-10-17 11:54:55,300-282-global_steps:1138000,grad:7.0578,MSELoss:0.0108,exp_loss:0.1934 +2022-10-17 11:55:56,748-344-epoch:97 +2022-10-17 11:57:03,675-282-global_steps:1139000,grad:7.2237,MSELoss:0.0105,exp_loss:0.1886 +2022-10-17 11:59:06,429-282-global_steps:1140000,grad:7.1386,MSELoss:0.0108,exp_loss:0.1917 +2022-10-17 12:01:05,971-282-global_steps:1141000,grad:7.1736,MSELoss:0.0108,exp_loss:0.1934 +2022-10-17 12:02:56,515-282-global_steps:1142000,grad:7.1402,MSELoss:0.0108,exp_loss:0.1932 +2022-10-17 12:04:50,929-282-global_steps:1143000,grad:7.1058,MSELoss:0.0108,exp_loss:0.1925 +2022-10-17 12:06:43,793-282-global_steps:1144000,grad:7.1152,MSELoss:0.0108,exp_loss:0.1931 +2022-10-17 12:08:27,671-282-global_steps:1145000,grad:7.1007,MSELoss:0.0108,exp_loss:0.1926 +2022-10-17 12:10:11,489-282-global_steps:1146000,grad:7.0730,MSELoss:0.0108,exp_loss:0.1919 +2022-10-17 12:11:52,095-282-global_steps:1147000,grad:7.0781,MSELoss:0.0108,exp_loss:0.1927 +2022-10-17 12:13:39,220-282-global_steps:1148000,grad:7.0480,MSELoss:0.0108,exp_loss:0.1925 +2022-10-17 12:15:23,526-282-global_steps:1149000,grad:7.0132,MSELoss:0.0108,exp_loss:0.1924 +2022-10-17 12:17:05,432-282-global_steps:1150000,grad:7.0056,MSELoss:0.0108,exp_loss:0.1925 +2022-10-17 12:17:29,304-344-epoch:98 +2022-10-17 12:18:43,304-282-global_steps:1151000,grad:7.2564,MSELoss:0.0108,exp_loss:0.1910 +2022-10-17 12:20:28,871-282-global_steps:1152000,grad:7.3831,MSELoss:0.0109,exp_loss:0.1959 +2022-10-17 12:22:29,209-282-global_steps:1153000,grad:7.2848,MSELoss:0.0109,exp_loss:0.1944 +2022-10-17 12:23:54,636-282-global_steps:1154000,grad:7.2245,MSELoss:0.0108,exp_loss:0.1943 +2022-10-17 12:25:20,097-282-global_steps:1155000,grad:7.1809,MSELoss:0.0108,exp_loss:0.1934 +2022-10-17 12:26:45,486-282-global_steps:1156000,grad:7.1482,MSELoss:0.0108,exp_loss:0.1931 +2022-10-17 12:28:10,803-282-global_steps:1157000,grad:7.1047,MSELoss:0.0108,exp_loss:0.1920 +2022-10-17 12:29:36,222-282-global_steps:1158000,grad:7.0917,MSELoss:0.0108,exp_loss:0.1921 +2022-10-17 12:31:01,677-282-global_steps:1159000,grad:7.0779,MSELoss:0.0108,exp_loss:0.1924 +2022-10-17 12:32:27,134-282-global_steps:1160000,grad:7.0438,MSELoss:0.0108,exp_loss:0.1920 +2022-10-17 12:33:52,565-282-global_steps:1161000,grad:7.0127,MSELoss:0.0108,exp_loss:0.1915 +2022-10-17 12:35:14,916-344-epoch:99 +2022-10-17 12:35:18,060-282-global_steps:1162000,grad:7.8344,MSELoss:0.0104,exp_loss:0.1949 +2022-10-17 12:36:43,495-282-global_steps:1163000,grad:7.3755,MSELoss:0.0106,exp_loss:0.1916 +2022-10-17 12:38:17,816-282-global_steps:1164000,grad:7.2409,MSELoss:0.0107,exp_loss:0.1910 +2022-10-17 12:40:32,400-282-global_steps:1165000,grad:7.1859,MSELoss:0.0107,exp_loss:0.1905 +2022-10-17 
12:42:45,469-282-global_steps:1166000,grad:7.1636,MSELoss:0.0108,exp_loss:0.1916
+2022-10-17 12:44:57,978-282-global_steps:1167000,grad:7.1535,MSELoss:0.0107,exp_loss:0.1912
+2022-10-17 12:47:09,800-282-global_steps:1168000,grad:7.1277,MSELoss:0.0107,exp_loss:0.1909
+2022-10-17 12:49:18,811-282-global_steps:1169000,grad:7.0862,MSELoss:0.0107,exp_loss:0.1905
+2022-10-17 12:51:28,131-282-global_steps:1170000,grad:7.0694,MSELoss:0.0108,exp_loss:0.1907
+2022-10-17 12:53:38,380-282-global_steps:1171000,grad:7.0588,MSELoss:0.0108,exp_loss:0.1904
+2022-10-17 12:55:42,957-282-global_steps:1172000,grad:7.0377,MSELoss:0.0107,exp_loss:0.1903
+2022-10-17 12:57:46,263-282-global_steps:1173000,grad:7.0203,MSELoss:0.0107,exp_loss:0.1902
diff --git a/experiments/2022-10-31-smplx_S2G-body-vq-3d/smplx_S2G.json b/experiments/2022-10-31-smplx_S2G-body-vq-3d/smplx_S2G.json
new file mode 100644
index 0000000000000000000000000000000000000000..c45ce9872c4e1f48c8e16f2bb214c1c51df26063
--- /dev/null
+++ b/experiments/2022-10-31-smplx_S2G-body-vq-3d/smplx_S2G.json
@@ -0,0 +1,89 @@
+{
+    "config_root_path": "/is/cluster/scratch/hyi/ExpressiveBody/SMPLifyX4/scripts",
+    "dataset_load_mode": "pickle",
+    "store_file_path": "store.pkl",
+    "smplx_npz_path": "visualise/smplx_model/SMPLX_NEUTRAL_2020.npz",
+    "extra_joint_path": "visualise/smplx_model/smplx_extra_joints.yaml",
+    "j14_regressor_path": "visualise/smplx_model/SMPLX_to_J14.pkl",
+    "param": {
+        "w_j": 1,
+        "w_b": 1,
+        "w_h": 1
+    },
+    "Data": {
+        "data_root": "../../expressive_body-V0.7/",
+        "pklname": "_3d_ta_body_mfcc_4p.pkl",
+        "pose": {
+            "normalization": false,
+            "convert_to_6d": false,
+            "norm_method": "all",
+            "augmentation": false,
+            "generate_length": 88,
+            "pre_pose_length": 0,
+            "pose_dim": 99,
+            "expression": true
+        },
+        "aud": {
+            "feat_method": "mfcc",
+            "aud_feat_dim": 64,
+            "aud_feat_win_size": null,
+            "context_info": false
+        }
+    },
+    "Model": {
+        "model_type": "body",
+        "model_name": "s2g_body_vq",
+        "composition": true,
+        "code_num": 2048,
+        "bh_model": false,
+        "AudioOpt": "Adam",
+        "encoder_choice": "mfcc",
+        "operation_kernel": "rnn",
+        "interaction": "concat",
+        "rnn_cell": "gru",
+        "T_layer_norm": true,
+        "bidirectional": true,
+        "residual": true,
+        "use_template": true,
+        "template_length": 64,
+        "gan": false,
+        "separate": true,
+        "l1_joints": false,
+        "radianloss": true
+    },
+    "DataLoader": {
+        "batch_size": 128,
+        "num_workers": 8
+    },
+    "Train": {
+        "epochs": 100,
+        "max_gradient_norm": 5,
+        "recon_input": true,
+        "learning_rate": {
+            "generator_learning_rate": 1e-4,
+            "discriminator_learning_rate": 1e-4
+        },
+        "weights": {
+            "kl_tolerance": 0.02,
+            "velocity_length": 10,
+            "keypoint_loss_weight": 1,
+            "recon_input_weight": 1,
+            "kl_loss_weight": 0.2,
+            "kl_start_weight": 1e-5,
+            "kl_decay_rate": 0.9995,
+            "vel_loss_weight": 1,
+            "vel_start_weight": 1e-5,
+            "vel_decay_rate": 0.99995,
+            "r_loss_weight": 1,
+            "zero_loss_weight": 0,
+            "gan_loss_weight": 0.1,
+            "k": 1.0
+        }
+    },
+    "Log": {
+        "save_every": 50,
+        "print_every": 400,
+        "name": "body-vq-composition2048"
+    }
+}
+ 
\ No newline at end of file
diff --git a/experiments/2022-10-31-smplx_S2G-body-vq-3d/train.log b/experiments/2022-10-31-smplx_S2G-body-vq-3d/train.log
new file mode 100644
index 0000000000000000000000000000000000000000..5768818281335e119bfef21418e81037c1702ae8
--- /dev/null
+++ b/experiments/2022-10-31-smplx_S2G-body-vq-3d/train.log
@@ -0,0 +1,811 @@
+2022-10-31 13:48:06,652-275-start_training
+2022-10-31 13:48:06,652-278-epoch:0
+2022-10-31
13:50:19,729-215-global_steps:400,b-rec_loss:0.1016,b-velocity_loss:0.0234,b-e_q_loss:0.0079,h-rec_loss:0.1032,h-velocity_loss:0.0348,h-e_q_loss:0.0068 +2022-10-31 13:52:32,925-215-global_steps:800,b-rec_loss:0.0851,b-velocity_loss:0.0176,b-e_q_loss:0.0065,h-rec_loss:0.0915,h-velocity_loss:0.0298,h-e_q_loss:0.0051 +2022-10-31 13:54:46,459-215-global_steps:1200,b-rec_loss:0.0775,b-velocity_loss:0.0151,b-e_q_loss:0.0065,h-rec_loss:0.0861,h-velocity_loss:0.0279,h-e_q_loss:0.0046 +2022-10-31 13:57:00,112-215-global_steps:1600,b-rec_loss:0.0729,b-velocity_loss:0.0136,b-e_q_loss:0.0067,h-rec_loss:0.0823,h-velocity_loss:0.0268,h-e_q_loss:0.0047 +2022-10-31 13:59:14,275-215-global_steps:2000,b-rec_loss:0.0697,b-velocity_loss:0.0126,b-e_q_loss:0.0072,h-rec_loss:0.0795,h-velocity_loss:0.0261,h-e_q_loss:0.0051 +2022-10-31 14:01:28,526-215-global_steps:2400,b-rec_loss:0.0671,b-velocity_loss:0.0119,b-e_q_loss:0.0078,h-rec_loss:0.0773,h-velocity_loss:0.0256,h-e_q_loss:0.0057 +2022-10-31 14:03:42,675-215-global_steps:2800,b-rec_loss:0.0651,b-velocity_loss:0.0113,b-e_q_loss:0.0084,h-rec_loss:0.0755,h-velocity_loss:0.0252,h-e_q_loss:0.0061 +2022-10-31 14:03:56,752-278-epoch:1 +2022-10-31 14:05:57,907-215-global_steps:3200,b-rec_loss:0.0518,b-velocity_loss:0.0074,b-e_q_loss:0.0127,h-rec_loss:0.0644,h-velocity_loss:0.0231,h-e_q_loss:0.0106 +2022-10-31 14:08:12,350-215-global_steps:3600,b-rec_loss:0.0513,b-velocity_loss:0.0072,b-e_q_loss:0.0130,h-rec_loss:0.0639,h-velocity_loss:0.0230,h-e_q_loss:0.0113 +2022-10-31 14:10:27,444-215-global_steps:4000,b-rec_loss:0.0510,b-velocity_loss:0.0070,b-e_q_loss:0.0134,h-rec_loss:0.0632,h-velocity_loss:0.0230,h-e_q_loss:0.0121 +2022-10-31 14:12:42,135-215-global_steps:4400,b-rec_loss:0.0505,b-velocity_loss:0.0069,b-e_q_loss:0.0137,h-rec_loss:0.0626,h-velocity_loss:0.0229,h-e_q_loss:0.0127 +2022-10-31 14:14:56,464-215-global_steps:4800,b-rec_loss:0.0501,b-velocity_loss:0.0068,b-e_q_loss:0.0140,h-rec_loss:0.0620,h-velocity_loss:0.0229,h-e_q_loss:0.0131 +2022-10-31 14:17:10,273-215-global_steps:5200,b-rec_loss:0.0497,b-velocity_loss:0.0067,b-e_q_loss:0.0143,h-rec_loss:0.0615,h-velocity_loss:0.0228,h-e_q_loss:0.0135 +2022-10-31 14:19:24,564-215-global_steps:5600,b-rec_loss:0.0493,b-velocity_loss:0.0066,b-e_q_loss:0.0145,h-rec_loss:0.0610,h-velocity_loss:0.0228,h-e_q_loss:0.0138 +2022-10-31 14:19:52,950-278-epoch:2 +2022-10-31 14:21:41,279-215-global_steps:6000,b-rec_loss:0.0460,b-velocity_loss:0.0059,b-e_q_loss:0.0162,h-rec_loss:0.0575,h-velocity_loss:0.0225,h-e_q_loss:0.0157 +2022-10-31 14:23:54,732-215-global_steps:6400,b-rec_loss:0.0456,b-velocity_loss:0.0058,b-e_q_loss:0.0163,h-rec_loss:0.0573,h-velocity_loss:0.0225,h-e_q_loss:0.0159 +2022-10-31 14:26:09,002-215-global_steps:6800,b-rec_loss:0.0454,b-velocity_loss:0.0058,b-e_q_loss:0.0164,h-rec_loss:0.0571,h-velocity_loss:0.0225,h-e_q_loss:0.0160 +2022-10-31 14:28:22,895-215-global_steps:7200,b-rec_loss:0.0451,b-velocity_loss:0.0057,b-e_q_loss:0.0164,h-rec_loss:0.0568,h-velocity_loss:0.0225,h-e_q_loss:0.0161 +2022-10-31 14:30:37,540-215-global_steps:7600,b-rec_loss:0.0448,b-velocity_loss:0.0056,b-e_q_loss:0.0165,h-rec_loss:0.0566,h-velocity_loss:0.0224,h-e_q_loss:0.0162 +2022-10-31 14:32:51,729-215-global_steps:8000,b-rec_loss:0.0445,b-velocity_loss:0.0056,b-e_q_loss:0.0165,h-rec_loss:0.0564,h-velocity_loss:0.0224,h-e_q_loss:0.0163 +2022-10-31 14:35:05,956-215-global_steps:8400,b-rec_loss:0.0442,b-velocity_loss:0.0056,b-e_q_loss:0.0166,h-rec_loss:0.0562,h-velocity_loss:0.0224,h-e_q_loss:0.0164 +2022-10-31 
14:35:48,431-278-epoch:3 +2022-10-31 14:37:22,247-215-global_steps:8800,b-rec_loss:0.0420,b-velocity_loss:0.0052,b-e_q_loss:0.0168,h-rec_loss:0.0547,h-velocity_loss:0.0223,h-e_q_loss:0.0169 +2022-10-31 14:39:36,259-215-global_steps:9200,b-rec_loss:0.0420,b-velocity_loss:0.0052,b-e_q_loss:0.0167,h-rec_loss:0.0543,h-velocity_loss:0.0223,h-e_q_loss:0.0168 +2022-10-31 14:41:49,812-215-global_steps:9600,b-rec_loss:0.0417,b-velocity_loss:0.0051,b-e_q_loss:0.0167,h-rec_loss:0.0541,h-velocity_loss:0.0223,h-e_q_loss:0.0167 +2022-10-31 14:44:04,085-215-global_steps:10000,b-rec_loss:0.0414,b-velocity_loss:0.0051,b-e_q_loss:0.0167,h-rec_loss:0.0539,h-velocity_loss:0.0223,h-e_q_loss:0.0166 +2022-10-31 14:46:19,795-215-global_steps:10400,b-rec_loss:0.0412,b-velocity_loss:0.0051,b-e_q_loss:0.0167,h-rec_loss:0.0540,h-velocity_loss:0.0223,h-e_q_loss:0.0166 +2022-10-31 14:48:37,455-215-global_steps:10800,b-rec_loss:0.0409,b-velocity_loss:0.0050,b-e_q_loss:0.0167,h-rec_loss:0.0541,h-velocity_loss:0.0223,h-e_q_loss:0.0166 +2022-10-31 14:50:52,416-215-global_steps:11200,b-rec_loss:0.0406,b-velocity_loss:0.0050,b-e_q_loss:0.0166,h-rec_loss:0.0541,h-velocity_loss:0.0223,h-e_q_loss:0.0165 +2022-10-31 14:51:48,956-278-epoch:4 +2022-10-31 14:53:07,562-215-global_steps:11600,b-rec_loss:0.0386,b-velocity_loss:0.0047,b-e_q_loss:0.0164,h-rec_loss:0.0525,h-velocity_loss:0.0223,h-e_q_loss:0.0160 +2022-10-31 14:55:23,987-215-global_steps:12000,b-rec_loss:0.0383,b-velocity_loss:0.0047,b-e_q_loss:0.0164,h-rec_loss:0.0525,h-velocity_loss:0.0223,h-e_q_loss:0.0162 +2022-10-31 14:57:39,325-215-global_steps:12400,b-rec_loss:0.0381,b-velocity_loss:0.0047,b-e_q_loss:0.0164,h-rec_loss:0.0523,h-velocity_loss:0.0222,h-e_q_loss:0.0161 +2022-10-31 14:59:54,258-215-global_steps:12800,b-rec_loss:0.0379,b-velocity_loss:0.0047,b-e_q_loss:0.0164,h-rec_loss:0.0521,h-velocity_loss:0.0222,h-e_q_loss:0.0160 +2022-10-31 15:02:09,440-215-global_steps:13200,b-rec_loss:0.0378,b-velocity_loss:0.0047,b-e_q_loss:0.0163,h-rec_loss:0.0518,h-velocity_loss:0.0222,h-e_q_loss:0.0160 +2022-10-31 15:04:24,113-215-global_steps:13600,b-rec_loss:0.0379,b-velocity_loss:0.0048,b-e_q_loss:0.0163,h-rec_loss:0.0517,h-velocity_loss:0.0222,h-e_q_loss:0.0159 +2022-10-31 15:06:39,326-215-global_steps:14000,b-rec_loss:0.0380,b-velocity_loss:0.0048,b-e_q_loss:0.0162,h-rec_loss:0.0515,h-velocity_loss:0.0222,h-e_q_loss:0.0159 +2022-10-31 15:07:49,894-278-epoch:5 +2022-10-31 15:08:55,193-215-global_steps:14400,b-rec_loss:0.0368,b-velocity_loss:0.0046,b-e_q_loss:0.0157,h-rec_loss:0.0504,h-velocity_loss:0.0221,h-e_q_loss:0.0155 +2022-10-31 15:11:10,076-215-global_steps:14800,b-rec_loss:0.0366,b-velocity_loss:0.0046,b-e_q_loss:0.0158,h-rec_loss:0.0501,h-velocity_loss:0.0221,h-e_q_loss:0.0155 +2022-10-31 15:13:24,392-215-global_steps:15200,b-rec_loss:0.0365,b-velocity_loss:0.0046,b-e_q_loss:0.0158,h-rec_loss:0.0499,h-velocity_loss:0.0221,h-e_q_loss:0.0154 +2022-10-31 15:15:40,538-215-global_steps:15600,b-rec_loss:0.0363,b-velocity_loss:0.0045,b-e_q_loss:0.0159,h-rec_loss:0.0498,h-velocity_loss:0.0221,h-e_q_loss:0.0154 +2022-10-31 15:17:54,939-215-global_steps:16000,b-rec_loss:0.0362,b-velocity_loss:0.0045,b-e_q_loss:0.0159,h-rec_loss:0.0497,h-velocity_loss:0.0221,h-e_q_loss:0.0154 +2022-10-31 15:20:10,043-215-global_steps:16400,b-rec_loss:0.0361,b-velocity_loss:0.0045,b-e_q_loss:0.0159,h-rec_loss:0.0496,h-velocity_loss:0.0220,h-e_q_loss:0.0153 +2022-10-31 
15:22:26,284-215-global_steps:16800,b-rec_loss:0.0360,b-velocity_loss:0.0044,b-e_q_loss:0.0159,h-rec_loss:0.0494,h-velocity_loss:0.0220,h-e_q_loss:0.0153 +2022-10-31 15:23:51,848-278-epoch:6 +2022-10-31 15:24:42,914-215-global_steps:17200,b-rec_loss:0.0350,b-velocity_loss:0.0043,b-e_q_loss:0.0160,h-rec_loss:0.0489,h-velocity_loss:0.0222,h-e_q_loss:0.0151 +2022-10-31 15:26:56,687-215-global_steps:17600,b-rec_loss:0.0348,b-velocity_loss:0.0042,b-e_q_loss:0.0159,h-rec_loss:0.0485,h-velocity_loss:0.0220,h-e_q_loss:0.0151 +2022-10-31 15:29:09,945-215-global_steps:18000,b-rec_loss:0.0347,b-velocity_loss:0.0042,b-e_q_loss:0.0159,h-rec_loss:0.0484,h-velocity_loss:0.0220,h-e_q_loss:0.0151 +2022-10-31 15:31:24,709-215-global_steps:18400,b-rec_loss:0.0346,b-velocity_loss:0.0042,b-e_q_loss:0.0159,h-rec_loss:0.0483,h-velocity_loss:0.0220,h-e_q_loss:0.0150 +2022-10-31 15:33:39,989-215-global_steps:18800,b-rec_loss:0.0348,b-velocity_loss:0.0043,b-e_q_loss:0.0158,h-rec_loss:0.0482,h-velocity_loss:0.0220,h-e_q_loss:0.0150 +2022-10-31 15:35:54,533-215-global_steps:19200,b-rec_loss:0.0348,b-velocity_loss:0.0043,b-e_q_loss:0.0157,h-rec_loss:0.0481,h-velocity_loss:0.0220,h-e_q_loss:0.0150 +2022-10-31 15:38:09,100-215-global_steps:19600,b-rec_loss:0.0348,b-velocity_loss:0.0043,b-e_q_loss:0.0157,h-rec_loss:0.0479,h-velocity_loss:0.0220,h-e_q_loss:0.0149 +2022-10-31 15:39:48,132-278-epoch:7 +2022-10-31 15:40:25,689-215-global_steps:20000,b-rec_loss:0.0338,b-velocity_loss:0.0041,b-e_q_loss:0.0156,h-rec_loss:0.0473,h-velocity_loss:0.0219,h-e_q_loss:0.0146 +2022-10-31 15:42:40,843-215-global_steps:20400,b-rec_loss:0.0336,b-velocity_loss:0.0041,b-e_q_loss:0.0156,h-rec_loss:0.0472,h-velocity_loss:0.0219,h-e_q_loss:0.0147 +2022-10-31 15:44:54,838-215-global_steps:20800,b-rec_loss:0.0335,b-velocity_loss:0.0041,b-e_q_loss:0.0156,h-rec_loss:0.0471,h-velocity_loss:0.0219,h-e_q_loss:0.0146 +2022-10-31 15:47:09,161-215-global_steps:21200,b-rec_loss:0.0334,b-velocity_loss:0.0041,b-e_q_loss:0.0156,h-rec_loss:0.0470,h-velocity_loss:0.0219,h-e_q_loss:0.0146 +2022-10-31 15:49:24,759-215-global_steps:21600,b-rec_loss:0.0334,b-velocity_loss:0.0041,b-e_q_loss:0.0156,h-rec_loss:0.0469,h-velocity_loss:0.0219,h-e_q_loss:0.0146 +2022-10-31 15:51:39,993-215-global_steps:22000,b-rec_loss:0.0333,b-velocity_loss:0.0041,b-e_q_loss:0.0156,h-rec_loss:0.0469,h-velocity_loss:0.0219,h-e_q_loss:0.0145 +2022-10-31 15:53:55,307-215-global_steps:22400,b-rec_loss:0.0333,b-velocity_loss:0.0041,b-e_q_loss:0.0156,h-rec_loss:0.0468,h-velocity_loss:0.0219,h-e_q_loss:0.0145 +2022-10-31 15:55:49,207-278-epoch:8 +2022-10-31 15:56:12,279-215-global_steps:22800,b-rec_loss:0.0324,b-velocity_loss:0.0040,b-e_q_loss:0.0153,h-rec_loss:0.0468,h-velocity_loss:0.0220,h-e_q_loss:0.0147 +2022-10-31 15:58:29,248-215-global_steps:23200,b-rec_loss:0.0324,b-velocity_loss:0.0040,b-e_q_loss:0.0154,h-rec_loss:0.0475,h-velocity_loss:0.0220,h-e_q_loss:0.0145 +2022-10-31 16:00:46,520-215-global_steps:23600,b-rec_loss:0.0324,b-velocity_loss:0.0040,b-e_q_loss:0.0154,h-rec_loss:0.0468,h-velocity_loss:0.0219,h-e_q_loss:0.0143 +2022-10-31 16:03:02,806-215-global_steps:24000,b-rec_loss:0.0324,b-velocity_loss:0.0040,b-e_q_loss:0.0154,h-rec_loss:0.0465,h-velocity_loss:0.0219,h-e_q_loss:0.0143 +2022-10-31 16:05:18,535-215-global_steps:24400,b-rec_loss:0.0323,b-velocity_loss:0.0040,b-e_q_loss:0.0154,h-rec_loss:0.0463,h-velocity_loss:0.0219,h-e_q_loss:0.0143 +2022-10-31 
16:07:36,307-215-global_steps:24800,b-rec_loss:0.0323,b-velocity_loss:0.0040,b-e_q_loss:0.0154,h-rec_loss:0.0462,h-velocity_loss:0.0219,h-e_q_loss:0.0143 +2022-10-31 16:09:55,057-215-global_steps:25200,b-rec_loss:0.0322,b-velocity_loss:0.0040,b-e_q_loss:0.0153,h-rec_loss:0.0461,h-velocity_loss:0.0219,h-e_q_loss:0.0142 +2022-10-31 16:12:05,306-278-epoch:9 +2022-10-31 16:12:13,895-215-global_steps:25600,b-rec_loss:0.0315,b-velocity_loss:0.0039,b-e_q_loss:0.0151,h-rec_loss:0.0452,h-velocity_loss:0.0218,h-e_q_loss:0.0139 +2022-10-31 16:14:29,894-215-global_steps:26000,b-rec_loss:0.0315,b-velocity_loss:0.0039,b-e_q_loss:0.0152,h-rec_loss:0.0452,h-velocity_loss:0.0217,h-e_q_loss:0.0140 +2022-10-31 16:16:47,160-215-global_steps:26400,b-rec_loss:0.0315,b-velocity_loss:0.0039,b-e_q_loss:0.0151,h-rec_loss:0.0451,h-velocity_loss:0.0218,h-e_q_loss:0.0140 +2022-10-31 16:19:04,087-215-global_steps:26800,b-rec_loss:0.0316,b-velocity_loss:0.0039,b-e_q_loss:0.0151,h-rec_loss:0.0451,h-velocity_loss:0.0218,h-e_q_loss:0.0140 +2022-10-31 16:21:19,934-215-global_steps:27200,b-rec_loss:0.0316,b-velocity_loss:0.0039,b-e_q_loss:0.0151,h-rec_loss:0.0450,h-velocity_loss:0.0218,h-e_q_loss:0.0140 +2022-10-31 16:23:36,795-215-global_steps:27600,b-rec_loss:0.0315,b-velocity_loss:0.0039,b-e_q_loss:0.0151,h-rec_loss:0.0449,h-velocity_loss:0.0218,h-e_q_loss:0.0139 +2022-10-31 16:25:54,427-215-global_steps:28000,b-rec_loss:0.0314,b-velocity_loss:0.0039,b-e_q_loss:0.0151,h-rec_loss:0.0449,h-velocity_loss:0.0218,h-e_q_loss:0.0139 +2022-10-31 16:28:12,805-215-global_steps:28400,b-rec_loss:0.0314,b-velocity_loss:0.0039,b-e_q_loss:0.0151,h-rec_loss:0.0448,h-velocity_loss:0.0218,h-e_q_loss:0.0139 +2022-10-31 16:28:20,539-278-epoch:10 +2022-10-31 16:30:30,998-215-global_steps:28800,b-rec_loss:0.0307,b-velocity_loss:0.0038,b-e_q_loss:0.0150,h-rec_loss:0.0443,h-velocity_loss:0.0217,h-e_q_loss:0.0137 +2022-10-31 16:32:45,985-215-global_steps:29200,b-rec_loss:0.0307,b-velocity_loss:0.0038,b-e_q_loss:0.0151,h-rec_loss:0.0443,h-velocity_loss:0.0217,h-e_q_loss:0.0137 +2022-10-31 16:35:02,945-215-global_steps:29600,b-rec_loss:0.0306,b-velocity_loss:0.0038,b-e_q_loss:0.0151,h-rec_loss:0.0443,h-velocity_loss:0.0217,h-e_q_loss:0.0137 +2022-10-31 16:37:19,613-215-global_steps:30000,b-rec_loss:0.0306,b-velocity_loss:0.0038,b-e_q_loss:0.0151,h-rec_loss:0.0442,h-velocity_loss:0.0217,h-e_q_loss:0.0137 +2022-10-31 16:39:35,252-215-global_steps:30400,b-rec_loss:0.0306,b-velocity_loss:0.0038,b-e_q_loss:0.0151,h-rec_loss:0.0442,h-velocity_loss:0.0217,h-e_q_loss:0.0137 +2022-10-31 16:41:52,412-215-global_steps:30800,b-rec_loss:0.0306,b-velocity_loss:0.0038,b-e_q_loss:0.0151,h-rec_loss:0.0441,h-velocity_loss:0.0217,h-e_q_loss:0.0136 +2022-10-31 16:44:13,202-215-global_steps:31200,b-rec_loss:0.0306,b-velocity_loss:0.0038,b-e_q_loss:0.0151,h-rec_loss:0.0441,h-velocity_loss:0.0217,h-e_q_loss:0.0136 +2022-10-31 16:44:34,934-278-epoch:11 +2022-10-31 16:46:34,419-215-global_steps:31600,b-rec_loss:0.0303,b-velocity_loss:0.0038,b-e_q_loss:0.0151,h-rec_loss:0.0438,h-velocity_loss:0.0217,h-e_q_loss:0.0134 +2022-10-31 16:48:52,448-215-global_steps:32000,b-rec_loss:0.0303,b-velocity_loss:0.0038,b-e_q_loss:0.0151,h-rec_loss:0.0437,h-velocity_loss:0.0217,h-e_q_loss:0.0135 +2022-10-31 16:51:10,902-215-global_steps:32400,b-rec_loss:0.0304,b-velocity_loss:0.0038,b-e_q_loss:0.0151,h-rec_loss:0.0436,h-velocity_loss:0.0217,h-e_q_loss:0.0134 +2022-10-31 
16:53:30,700-215-global_steps:32800,b-rec_loss:0.0303,b-velocity_loss:0.0038,b-e_q_loss:0.0151,h-rec_loss:0.0435,h-velocity_loss:0.0217,h-e_q_loss:0.0134 +2022-10-31 16:55:50,606-215-global_steps:33200,b-rec_loss:0.0302,b-velocity_loss:0.0038,b-e_q_loss:0.0151,h-rec_loss:0.0435,h-velocity_loss:0.0217,h-e_q_loss:0.0134 +2022-10-31 16:58:07,922-215-global_steps:33600,b-rec_loss:0.0302,b-velocity_loss:0.0038,b-e_q_loss:0.0152,h-rec_loss:0.0435,h-velocity_loss:0.0217,h-e_q_loss:0.0134 +2022-10-31 17:00:25,471-215-global_steps:34000,b-rec_loss:0.0302,b-velocity_loss:0.0038,b-e_q_loss:0.0152,h-rec_loss:0.0434,h-velocity_loss:0.0217,h-e_q_loss:0.0134 +2022-10-31 17:01:01,393-278-epoch:12 +2022-10-31 17:02:47,171-215-global_steps:34400,b-rec_loss:0.0296,b-velocity_loss:0.0037,b-e_q_loss:0.0152,h-rec_loss:0.0430,h-velocity_loss:0.0216,h-e_q_loss:0.0133 +2022-10-31 17:05:07,145-215-global_steps:34800,b-rec_loss:0.0297,b-velocity_loss:0.0037,b-e_q_loss:0.0152,h-rec_loss:0.0430,h-velocity_loss:0.0217,h-e_q_loss:0.0132 +2022-10-31 17:07:25,050-215-global_steps:35200,b-rec_loss:0.0298,b-velocity_loss:0.0037,b-e_q_loss:0.0152,h-rec_loss:0.0431,h-velocity_loss:0.0217,h-e_q_loss:0.0132 +2022-10-31 17:09:42,755-215-global_steps:35600,b-rec_loss:0.0298,b-velocity_loss:0.0037,b-e_q_loss:0.0152,h-rec_loss:0.0430,h-velocity_loss:0.0217,h-e_q_loss:0.0132 +2022-10-31 17:12:04,571-215-global_steps:36000,b-rec_loss:0.0297,b-velocity_loss:0.0037,b-e_q_loss:0.0152,h-rec_loss:0.0430,h-velocity_loss:0.0216,h-e_q_loss:0.0132 +2022-10-31 17:14:24,416-215-global_steps:36400,b-rec_loss:0.0297,b-velocity_loss:0.0037,b-e_q_loss:0.0152,h-rec_loss:0.0429,h-velocity_loss:0.0216,h-e_q_loss:0.0132 +2022-10-31 17:16:42,294-215-global_steps:36800,b-rec_loss:0.0296,b-velocity_loss:0.0037,b-e_q_loss:0.0152,h-rec_loss:0.0429,h-velocity_loss:0.0216,h-e_q_loss:0.0132 +2022-10-31 17:17:32,176-278-epoch:13 +2022-10-31 17:19:00,963-215-global_steps:37200,b-rec_loss:0.0293,b-velocity_loss:0.0036,b-e_q_loss:0.0152,h-rec_loss:0.0425,h-velocity_loss:0.0216,h-e_q_loss:0.0131 +2022-10-31 17:21:21,706-215-global_steps:37600,b-rec_loss:0.0293,b-velocity_loss:0.0037,b-e_q_loss:0.0152,h-rec_loss:0.0425,h-velocity_loss:0.0216,h-e_q_loss:0.0131 +2022-10-31 17:23:42,081-215-global_steps:38000,b-rec_loss:0.0292,b-velocity_loss:0.0037,b-e_q_loss:0.0152,h-rec_loss:0.0424,h-velocity_loss:0.0216,h-e_q_loss:0.0131 +2022-10-31 17:26:01,738-215-global_steps:38400,b-rec_loss:0.0296,b-velocity_loss:0.0038,b-e_q_loss:0.0153,h-rec_loss:0.0424,h-velocity_loss:0.0216,h-e_q_loss:0.0131 +2022-10-31 17:28:20,372-215-global_steps:38800,b-rec_loss:0.0301,b-velocity_loss:0.0038,b-e_q_loss:0.0151,h-rec_loss:0.0424,h-velocity_loss:0.0216,h-e_q_loss:0.0131 +2022-10-31 17:30:38,526-215-global_steps:39200,b-rec_loss:0.0302,b-velocity_loss:0.0038,b-e_q_loss:0.0150,h-rec_loss:0.0423,h-velocity_loss:0.0216,h-e_q_loss:0.0131 +2022-10-31 17:32:58,553-215-global_steps:39600,b-rec_loss:0.0302,b-velocity_loss:0.0038,b-e_q_loss:0.0150,h-rec_loss:0.0423,h-velocity_loss:0.0216,h-e_q_loss:0.0131 +2022-10-31 17:34:06,369-278-epoch:14 +2022-10-31 17:35:21,922-215-global_steps:40000,b-rec_loss:0.0299,b-velocity_loss:0.0037,b-e_q_loss:0.0150,h-rec_loss:0.0419,h-velocity_loss:0.0216,h-e_q_loss:0.0131 +2022-10-31 17:37:39,668-215-global_steps:40400,b-rec_loss:0.0298,b-velocity_loss:0.0037,b-e_q_loss:0.0150,h-rec_loss:0.0419,h-velocity_loss:0.0216,h-e_q_loss:0.0130 +2022-10-31 
17:39:55,643-215-global_steps:40800,b-rec_loss:0.0297,b-velocity_loss:0.0037,b-e_q_loss:0.0151,h-rec_loss:0.0419,h-velocity_loss:0.0216,h-e_q_loss:0.0130 +2022-10-31 17:42:14,998-215-global_steps:41200,b-rec_loss:0.0298,b-velocity_loss:0.0037,b-e_q_loss:0.0150,h-rec_loss:0.0419,h-velocity_loss:0.0216,h-e_q_loss:0.0130 +2022-10-31 17:44:34,358-215-global_steps:41600,b-rec_loss:0.0297,b-velocity_loss:0.0037,b-e_q_loss:0.0150,h-rec_loss:0.0418,h-velocity_loss:0.0216,h-e_q_loss:0.0130 +2022-10-31 17:46:54,111-215-global_steps:42000,b-rec_loss:0.0296,b-velocity_loss:0.0037,b-e_q_loss:0.0150,h-rec_loss:0.0418,h-velocity_loss:0.0216,h-e_q_loss:0.0130 +2022-10-31 17:49:11,363-215-global_steps:42400,b-rec_loss:0.0296,b-velocity_loss:0.0037,b-e_q_loss:0.0150,h-rec_loss:0.0418,h-velocity_loss:0.0216,h-e_q_loss:0.0130 +2022-10-31 17:50:30,677-278-epoch:15 +2022-10-31 17:51:31,431-215-global_steps:42800,b-rec_loss:0.0299,b-velocity_loss:0.0037,b-e_q_loss:0.0149,h-rec_loss:0.0416,h-velocity_loss:0.0216,h-e_q_loss:0.0129 +2022-10-31 17:53:50,566-215-global_steps:43200,b-rec_loss:0.0295,b-velocity_loss:0.0037,b-e_q_loss:0.0150,h-rec_loss:0.0416,h-velocity_loss:0.0216,h-e_q_loss:0.0129 +2022-10-31 17:56:11,335-215-global_steps:43600,b-rec_loss:0.0294,b-velocity_loss:0.0037,b-e_q_loss:0.0150,h-rec_loss:0.0416,h-velocity_loss:0.0215,h-e_q_loss:0.0129 +2022-10-31 17:58:27,672-215-global_steps:44000,b-rec_loss:0.0293,b-velocity_loss:0.0036,b-e_q_loss:0.0150,h-rec_loss:0.0415,h-velocity_loss:0.0216,h-e_q_loss:0.0129 +2022-10-31 18:00:44,955-215-global_steps:44400,b-rec_loss:0.0293,b-velocity_loss:0.0036,b-e_q_loss:0.0150,h-rec_loss:0.0415,h-velocity_loss:0.0216,h-e_q_loss:0.0129 +2022-10-31 18:03:04,646-215-global_steps:44800,b-rec_loss:0.0292,b-velocity_loss:0.0036,b-e_q_loss:0.0150,h-rec_loss:0.0415,h-velocity_loss:0.0215,h-e_q_loss:0.0129 +2022-10-31 18:05:25,913-215-global_steps:45200,b-rec_loss:0.0292,b-velocity_loss:0.0036,b-e_q_loss:0.0150,h-rec_loss:0.0414,h-velocity_loss:0.0215,h-e_q_loss:0.0128 +2022-10-31 18:06:59,352-278-epoch:16 +2022-10-31 18:07:45,587-215-global_steps:45600,b-rec_loss:0.0298,b-velocity_loss:0.0038,b-e_q_loss:0.0147,h-rec_loss:0.0410,h-velocity_loss:0.0215,h-e_q_loss:0.0127 +2022-10-31 18:10:02,502-215-global_steps:46000,b-rec_loss:0.0294,b-velocity_loss:0.0037,b-e_q_loss:0.0148,h-rec_loss:0.0412,h-velocity_loss:0.0215,h-e_q_loss:0.0128 +2022-10-31 18:12:21,493-215-global_steps:46400,b-rec_loss:0.0294,b-velocity_loss:0.0037,b-e_q_loss:0.0148,h-rec_loss:0.0415,h-velocity_loss:0.0216,h-e_q_loss:0.0129 +2022-10-31 18:14:42,996-215-global_steps:46800,b-rec_loss:0.0293,b-velocity_loss:0.0037,b-e_q_loss:0.0149,h-rec_loss:0.0415,h-velocity_loss:0.0216,h-e_q_loss:0.0128 +2022-10-31 18:17:03,619-215-global_steps:47200,b-rec_loss:0.0293,b-velocity_loss:0.0037,b-e_q_loss:0.0148,h-rec_loss:0.0414,h-velocity_loss:0.0216,h-e_q_loss:0.0128 +2022-10-31 18:19:19,767-215-global_steps:47600,b-rec_loss:0.0292,b-velocity_loss:0.0037,b-e_q_loss:0.0149,h-rec_loss:0.0413,h-velocity_loss:0.0216,h-e_q_loss:0.0127 +2022-10-31 18:21:37,700-215-global_steps:48000,b-rec_loss:0.0292,b-velocity_loss:0.0037,b-e_q_loss:0.0148,h-rec_loss:0.0413,h-velocity_loss:0.0216,h-e_q_loss:0.0127 +2022-10-31 18:23:27,750-278-epoch:17 +2022-10-31 18:23:58,937-215-global_steps:48400,b-rec_loss:0.0285,b-velocity_loss:0.0036,b-e_q_loss:0.0149,h-rec_loss:0.0408,h-velocity_loss:0.0215,h-e_q_loss:0.0126 +2022-10-31 
18:26:17,880-215-global_steps:48800,b-rec_loss:0.0285,b-velocity_loss:0.0036,b-e_q_loss:0.0149,h-rec_loss:0.0408,h-velocity_loss:0.0215,h-e_q_loss:0.0126 +2022-10-31 18:28:35,643-215-global_steps:49200,b-rec_loss:0.0286,b-velocity_loss:0.0036,b-e_q_loss:0.0149,h-rec_loss:0.0407,h-velocity_loss:0.0215,h-e_q_loss:0.0126 +2022-10-31 18:30:54,018-215-global_steps:49600,b-rec_loss:0.0286,b-velocity_loss:0.0036,b-e_q_loss:0.0149,h-rec_loss:0.0407,h-velocity_loss:0.0215,h-e_q_loss:0.0126 +2022-10-31 18:33:13,950-215-global_steps:50000,b-rec_loss:0.0286,b-velocity_loss:0.0036,b-e_q_loss:0.0149,h-rec_loss:0.0407,h-velocity_loss:0.0215,h-e_q_loss:0.0126 +2022-10-31 18:35:33,799-215-global_steps:50400,b-rec_loss:0.0287,b-velocity_loss:0.0036,b-e_q_loss:0.0148,h-rec_loss:0.0407,h-velocity_loss:0.0215,h-e_q_loss:0.0126 +2022-10-31 18:37:50,901-215-global_steps:50800,b-rec_loss:0.0287,b-velocity_loss:0.0036,b-e_q_loss:0.0148,h-rec_loss:0.0407,h-velocity_loss:0.0215,h-e_q_loss:0.0126 +2022-10-31 18:39:54,825-278-epoch:18 +2022-10-31 18:40:11,478-215-global_steps:51200,b-rec_loss:0.0287,b-velocity_loss:0.0036,b-e_q_loss:0.0149,h-rec_loss:0.0403,h-velocity_loss:0.0213,h-e_q_loss:0.0125 +2022-10-31 18:42:32,759-215-global_steps:51600,b-rec_loss:0.0289,b-velocity_loss:0.0036,b-e_q_loss:0.0148,h-rec_loss:0.0406,h-velocity_loss:0.0215,h-e_q_loss:0.0125 +2022-10-31 18:44:51,463-215-global_steps:52000,b-rec_loss:0.0287,b-velocity_loss:0.0036,b-e_q_loss:0.0148,h-rec_loss:0.0406,h-velocity_loss:0.0215,h-e_q_loss:0.0126 +2022-10-31 18:47:08,255-215-global_steps:52400,b-rec_loss:0.0287,b-velocity_loss:0.0036,b-e_q_loss:0.0148,h-rec_loss:0.0405,h-velocity_loss:0.0215,h-e_q_loss:0.0126 +2022-10-31 18:49:26,745-215-global_steps:52800,b-rec_loss:0.0287,b-velocity_loss:0.0036,b-e_q_loss:0.0147,h-rec_loss:0.0405,h-velocity_loss:0.0215,h-e_q_loss:0.0126 +2022-10-31 18:51:46,612-215-global_steps:53200,b-rec_loss:0.0287,b-velocity_loss:0.0036,b-e_q_loss:0.0147,h-rec_loss:0.0405,h-velocity_loss:0.0215,h-e_q_loss:0.0126 +2022-10-31 18:54:06,784-215-global_steps:53600,b-rec_loss:0.0286,b-velocity_loss:0.0036,b-e_q_loss:0.0147,h-rec_loss:0.0405,h-velocity_loss:0.0215,h-e_q_loss:0.0125 +2022-10-31 18:56:23,677-278-epoch:19 +2022-10-31 18:56:25,995-215-global_steps:54000,b-rec_loss:0.0288,b-velocity_loss:0.0036,b-e_q_loss:0.0142,h-rec_loss:0.0411,h-velocity_loss:0.0212,h-e_q_loss:0.0123 +2022-10-31 18:58:44,314-215-global_steps:54400,b-rec_loss:0.0283,b-velocity_loss:0.0036,b-e_q_loss:0.0146,h-rec_loss:0.0409,h-velocity_loss:0.0215,h-e_q_loss:0.0127 +2022-10-31 19:01:06,005-215-global_steps:54800,b-rec_loss:0.0282,b-velocity_loss:0.0035,b-e_q_loss:0.0147,h-rec_loss:0.0409,h-velocity_loss:0.0215,h-e_q_loss:0.0125 +2022-10-31 19:03:25,614-215-global_steps:55200,b-rec_loss:0.0282,b-velocity_loss:0.0035,b-e_q_loss:0.0147,h-rec_loss:0.0407,h-velocity_loss:0.0215,h-e_q_loss:0.0125 +2022-10-31 19:05:42,023-215-global_steps:55600,b-rec_loss:0.0282,b-velocity_loss:0.0035,b-e_q_loss:0.0146,h-rec_loss:0.0405,h-velocity_loss:0.0215,h-e_q_loss:0.0125 +2022-10-31 19:08:00,265-215-global_steps:56000,b-rec_loss:0.0282,b-velocity_loss:0.0036,b-e_q_loss:0.0147,h-rec_loss:0.0405,h-velocity_loss:0.0215,h-e_q_loss:0.0125 +2022-10-31 19:10:19,508-215-global_steps:56400,b-rec_loss:0.0287,b-velocity_loss:0.0037,b-e_q_loss:0.0146,h-rec_loss:0.0404,h-velocity_loss:0.0215,h-e_q_loss:0.0125 +2022-10-31 19:12:38,794-215-global_steps:56800,b-rec_loss:0.0288,b-velocity_loss:0.0037,b-e_q_loss:0.0146,h-rec_loss:0.0403,h-velocity_loss:0.0215,h-e_q_loss:0.0125 
+2022-10-31 19:12:52,664-278-epoch:20 +2022-10-31 19:14:57,112-215-global_steps:57200,b-rec_loss:0.0285,b-velocity_loss:0.0036,b-e_q_loss:0.0145,h-rec_loss:0.0400,h-velocity_loss:0.0215,h-e_q_loss:0.0124 +2022-10-31 19:17:14,218-215-global_steps:57600,b-rec_loss:0.0285,b-velocity_loss:0.0036,b-e_q_loss:0.0146,h-rec_loss:0.0400,h-velocity_loss:0.0215,h-e_q_loss:0.0124 +2022-10-31 19:19:35,633-215-global_steps:58000,b-rec_loss:0.0284,b-velocity_loss:0.0036,b-e_q_loss:0.0146,h-rec_loss:0.0399,h-velocity_loss:0.0214,h-e_q_loss:0.0124 +2022-10-31 19:21:54,302-215-global_steps:58400,b-rec_loss:0.0283,b-velocity_loss:0.0036,b-e_q_loss:0.0146,h-rec_loss:0.0399,h-velocity_loss:0.0214,h-e_q_loss:0.0124 +2022-10-31 19:24:13,201-215-global_steps:58800,b-rec_loss:0.0282,b-velocity_loss:0.0036,b-e_q_loss:0.0146,h-rec_loss:0.0399,h-velocity_loss:0.0214,h-e_q_loss:0.0124 +2022-10-31 19:26:31,797-215-global_steps:59200,b-rec_loss:0.0281,b-velocity_loss:0.0036,b-e_q_loss:0.0146,h-rec_loss:0.0399,h-velocity_loss:0.0214,h-e_q_loss:0.0124 +2022-10-31 19:28:51,684-215-global_steps:59600,b-rec_loss:0.0281,b-velocity_loss:0.0035,b-e_q_loss:0.0146,h-rec_loss:0.0399,h-velocity_loss:0.0214,h-e_q_loss:0.0124 +2022-10-31 19:29:21,843-278-epoch:21 +2022-10-31 19:31:12,009-215-global_steps:60000,b-rec_loss:0.0276,b-velocity_loss:0.0035,b-e_q_loss:0.0146,h-rec_loss:0.0398,h-velocity_loss:0.0215,h-e_q_loss:0.0124 +2022-10-31 19:33:28,216-215-global_steps:60400,b-rec_loss:0.0276,b-velocity_loss:0.0035,b-e_q_loss:0.0145,h-rec_loss:0.0399,h-velocity_loss:0.0215,h-e_q_loss:0.0123 +2022-10-31 19:35:46,351-215-global_steps:60800,b-rec_loss:0.0275,b-velocity_loss:0.0035,b-e_q_loss:0.0145,h-rec_loss:0.0398,h-velocity_loss:0.0214,h-e_q_loss:0.0123 +2022-10-31 19:38:05,326-215-global_steps:61200,b-rec_loss:0.0276,b-velocity_loss:0.0035,b-e_q_loss:0.0145,h-rec_loss:0.0398,h-velocity_loss:0.0214,h-e_q_loss:0.0123 +2022-10-31 19:40:24,301-215-global_steps:61600,b-rec_loss:0.0276,b-velocity_loss:0.0035,b-e_q_loss:0.0145,h-rec_loss:0.0397,h-velocity_loss:0.0214,h-e_q_loss:0.0123 +2022-10-31 19:42:41,859-215-global_steps:62000,b-rec_loss:0.0275,b-velocity_loss:0.0035,b-e_q_loss:0.0145,h-rec_loss:0.0397,h-velocity_loss:0.0214,h-e_q_loss:0.0123 +2022-10-31 19:44:59,302-215-global_steps:62400,b-rec_loss:0.0275,b-velocity_loss:0.0035,b-e_q_loss:0.0145,h-rec_loss:0.0397,h-velocity_loss:0.0214,h-e_q_loss:0.0123 +2022-10-31 19:45:42,305-278-epoch:22 +2022-10-31 19:47:18,818-215-global_steps:62800,b-rec_loss:0.0275,b-velocity_loss:0.0035,b-e_q_loss:0.0143,h-rec_loss:0.0394,h-velocity_loss:0.0213,h-e_q_loss:0.0123 +2022-10-31 19:49:37,083-215-global_steps:63200,b-rec_loss:0.0274,b-velocity_loss:0.0035,b-e_q_loss:0.0143,h-rec_loss:0.0394,h-velocity_loss:0.0213,h-e_q_loss:0.0123 +2022-10-31 19:51:54,868-215-global_steps:63600,b-rec_loss:0.0273,b-velocity_loss:0.0035,b-e_q_loss:0.0144,h-rec_loss:0.0395,h-velocity_loss:0.0214,h-e_q_loss:0.0123 +2022-10-31 19:54:12,961-215-global_steps:64000,b-rec_loss:0.0273,b-velocity_loss:0.0035,b-e_q_loss:0.0144,h-rec_loss:0.0395,h-velocity_loss:0.0214,h-e_q_loss:0.0123 +2022-10-31 19:56:32,153-215-global_steps:64400,b-rec_loss:0.0272,b-velocity_loss:0.0035,b-e_q_loss:0.0144,h-rec_loss:0.0394,h-velocity_loss:0.0214,h-e_q_loss:0.0123 +2022-10-31 19:58:53,011-215-global_steps:64800,b-rec_loss:0.0272,b-velocity_loss:0.0035,b-e_q_loss:0.0144,h-rec_loss:0.0395,h-velocity_loss:0.0214,h-e_q_loss:0.0123 +2022-10-31 
20:01:11,098-215-global_steps:65200,b-rec_loss:0.0272,b-velocity_loss:0.0035,b-e_q_loss:0.0144,h-rec_loss:0.0395,h-velocity_loss:0.0214,h-e_q_loss:0.0123 +2022-10-31 20:02:08,804-278-epoch:23 +2022-10-31 20:03:30,465-215-global_steps:65600,b-rec_loss:0.0269,b-velocity_loss:0.0035,b-e_q_loss:0.0143,h-rec_loss:0.0396,h-velocity_loss:0.0214,h-e_q_loss:0.0122 +2022-10-31 20:05:49,040-215-global_steps:66000,b-rec_loss:0.0269,b-velocity_loss:0.0034,b-e_q_loss:0.0144,h-rec_loss:0.0395,h-velocity_loss:0.0214,h-e_q_loss:0.0122 +2022-10-31 20:08:08,482-215-global_steps:66400,b-rec_loss:0.0269,b-velocity_loss:0.0034,b-e_q_loss:0.0144,h-rec_loss:0.0394,h-velocity_loss:0.0214,h-e_q_loss:0.0122 +2022-10-31 20:10:25,592-215-global_steps:66800,b-rec_loss:0.0269,b-velocity_loss:0.0034,b-e_q_loss:0.0144,h-rec_loss:0.0394,h-velocity_loss:0.0214,h-e_q_loss:0.0122 +2022-10-31 20:12:42,871-215-global_steps:67200,b-rec_loss:0.0269,b-velocity_loss:0.0034,b-e_q_loss:0.0144,h-rec_loss:0.0394,h-velocity_loss:0.0214,h-e_q_loss:0.0122 +2022-10-31 20:15:02,537-215-global_steps:67600,b-rec_loss:0.0269,b-velocity_loss:0.0034,b-e_q_loss:0.0143,h-rec_loss:0.0393,h-velocity_loss:0.0214,h-e_q_loss:0.0122 +2022-10-31 20:17:23,025-215-global_steps:68000,b-rec_loss:0.0268,b-velocity_loss:0.0034,b-e_q_loss:0.0143,h-rec_loss:0.0393,h-velocity_loss:0.0214,h-e_q_loss:0.0122 +2022-10-31 20:18:34,664-278-epoch:24 +2022-10-31 20:19:40,919-215-global_steps:68400,b-rec_loss:0.0266,b-velocity_loss:0.0034,b-e_q_loss:0.0143,h-rec_loss:0.0390,h-velocity_loss:0.0214,h-e_q_loss:0.0121 +2022-10-31 20:21:58,503-215-global_steps:68800,b-rec_loss:0.0266,b-velocity_loss:0.0034,b-e_q_loss:0.0143,h-rec_loss:0.0390,h-velocity_loss:0.0213,h-e_q_loss:0.0121 +2022-10-31 20:24:17,667-215-global_steps:69200,b-rec_loss:0.0266,b-velocity_loss:0.0034,b-e_q_loss:0.0143,h-rec_loss:0.0390,h-velocity_loss:0.0213,h-e_q_loss:0.0122 +2022-10-31 20:26:35,438-215-global_steps:69600,b-rec_loss:0.0266,b-velocity_loss:0.0034,b-e_q_loss:0.0142,h-rec_loss:0.0390,h-velocity_loss:0.0213,h-e_q_loss:0.0122 +2022-10-31 20:28:54,890-215-global_steps:70000,b-rec_loss:0.0266,b-velocity_loss:0.0034,b-e_q_loss:0.0143,h-rec_loss:0.0390,h-velocity_loss:0.0213,h-e_q_loss:0.0122 +2022-10-31 20:31:12,460-215-global_steps:70400,b-rec_loss:0.0266,b-velocity_loss:0.0034,b-e_q_loss:0.0143,h-rec_loss:0.0390,h-velocity_loss:0.0213,h-e_q_loss:0.0122 +2022-10-31 20:33:31,176-215-global_steps:70800,b-rec_loss:0.0266,b-velocity_loss:0.0034,b-e_q_loss:0.0143,h-rec_loss:0.0390,h-velocity_loss:0.0213,h-e_q_loss:0.0122 +2022-10-31 20:35:00,774-278-epoch:25 +2022-10-31 20:35:53,144-215-global_steps:71200,b-rec_loss:0.0264,b-velocity_loss:0.0034,b-e_q_loss:0.0142,h-rec_loss:0.0390,h-velocity_loss:0.0214,h-e_q_loss:0.0121 +2022-10-31 20:38:11,109-215-global_steps:71600,b-rec_loss:0.0263,b-velocity_loss:0.0034,b-e_q_loss:0.0141,h-rec_loss:0.0389,h-velocity_loss:0.0213,h-e_q_loss:0.0121 +2022-10-31 20:40:28,230-215-global_steps:72000,b-rec_loss:0.0263,b-velocity_loss:0.0034,b-e_q_loss:0.0142,h-rec_loss:0.0389,h-velocity_loss:0.0213,h-e_q_loss:0.0121 +2022-10-31 20:42:48,278-215-global_steps:72400,b-rec_loss:0.0263,b-velocity_loss:0.0034,b-e_q_loss:0.0142,h-rec_loss:0.0388,h-velocity_loss:0.0213,h-e_q_loss:0.0121 +2022-10-31 20:45:07,695-215-global_steps:72800,b-rec_loss:0.0263,b-velocity_loss:0.0034,b-e_q_loss:0.0142,h-rec_loss:0.0388,h-velocity_loss:0.0213,h-e_q_loss:0.0121 +2022-10-31 
20:47:26,897-215-global_steps:73200,b-rec_loss:0.0263,b-velocity_loss:0.0034,b-e_q_loss:0.0142,h-rec_loss:0.0388,h-velocity_loss:0.0213,h-e_q_loss:0.0121 +2022-10-31 20:49:43,809-215-global_steps:73600,b-rec_loss:0.0264,b-velocity_loss:0.0034,b-e_q_loss:0.0142,h-rec_loss:0.0388,h-velocity_loss:0.0213,h-e_q_loss:0.0122 +2022-10-31 20:51:26,255-278-epoch:26 +2022-10-31 20:52:04,611-215-global_steps:74000,b-rec_loss:0.0260,b-velocity_loss:0.0034,b-e_q_loss:0.0140,h-rec_loss:0.0399,h-velocity_loss:0.0214,h-e_q_loss:0.0123 +2022-10-31 20:54:24,019-215-global_steps:74400,b-rec_loss:0.0261,b-velocity_loss:0.0034,b-e_q_loss:0.0141,h-rec_loss:0.0395,h-velocity_loss:0.0214,h-e_q_loss:0.0122 +2022-10-31 20:56:43,569-215-global_steps:74800,b-rec_loss:0.0261,b-velocity_loss:0.0034,b-e_q_loss:0.0141,h-rec_loss:0.0394,h-velocity_loss:0.0214,h-e_q_loss:0.0121 +2022-10-31 20:59:00,795-215-global_steps:75200,b-rec_loss:0.0262,b-velocity_loss:0.0034,b-e_q_loss:0.0141,h-rec_loss:0.0393,h-velocity_loss:0.0214,h-e_q_loss:0.0121 +2022-10-31 21:01:18,208-215-global_steps:75600,b-rec_loss:0.0263,b-velocity_loss:0.0034,b-e_q_loss:0.0141,h-rec_loss:0.0392,h-velocity_loss:0.0214,h-e_q_loss:0.0121 +2022-10-31 21:03:36,571-215-global_steps:76000,b-rec_loss:0.0262,b-velocity_loss:0.0034,b-e_q_loss:0.0141,h-rec_loss:0.0392,h-velocity_loss:0.0214,h-e_q_loss:0.0121 +2022-10-31 21:05:55,806-215-global_steps:76400,b-rec_loss:0.0262,b-velocity_loss:0.0034,b-e_q_loss:0.0141,h-rec_loss:0.0392,h-velocity_loss:0.0214,h-e_q_loss:0.0121 +2022-10-31 21:07:50,816-278-epoch:27 +2022-10-31 21:08:14,866-215-global_steps:76800,b-rec_loss:0.0263,b-velocity_loss:0.0034,b-e_q_loss:0.0144,h-rec_loss:0.0387,h-velocity_loss:0.0213,h-e_q_loss:0.0119 +2022-10-31 21:10:30,833-215-global_steps:77200,b-rec_loss:0.0260,b-velocity_loss:0.0034,b-e_q_loss:0.0141,h-rec_loss:0.0389,h-velocity_loss:0.0214,h-e_q_loss:0.0120 +2022-10-31 21:12:49,186-215-global_steps:77600,b-rec_loss:0.0259,b-velocity_loss:0.0034,b-e_q_loss:0.0141,h-rec_loss:0.0390,h-velocity_loss:0.0213,h-e_q_loss:0.0120 +2022-10-31 21:15:07,788-215-global_steps:78000,b-rec_loss:0.0259,b-velocity_loss:0.0034,b-e_q_loss:0.0141,h-rec_loss:0.0390,h-velocity_loss:0.0213,h-e_q_loss:0.0120 +2022-10-31 21:17:26,433-215-global_steps:78400,b-rec_loss:0.0258,b-velocity_loss:0.0034,b-e_q_loss:0.0141,h-rec_loss:0.0389,h-velocity_loss:0.0213,h-e_q_loss:0.0119 +2022-10-31 21:19:43,887-215-global_steps:78800,b-rec_loss:0.0258,b-velocity_loss:0.0034,b-e_q_loss:0.0141,h-rec_loss:0.0390,h-velocity_loss:0.0213,h-e_q_loss:0.0120 +2022-10-31 21:22:01,349-215-global_steps:79200,b-rec_loss:0.0258,b-velocity_loss:0.0034,b-e_q_loss:0.0141,h-rec_loss:0.0389,h-velocity_loss:0.0213,h-e_q_loss:0.0119 +2022-10-31 21:24:11,417-278-epoch:28 +2022-10-31 21:24:21,025-215-global_steps:79600,b-rec_loss:0.0254,b-velocity_loss:0.0034,b-e_q_loss:0.0138,h-rec_loss:0.0407,h-velocity_loss:0.0214,h-e_q_loss:0.0120 +2022-10-31 21:26:38,668-215-global_steps:80000,b-rec_loss:0.0256,b-velocity_loss:0.0034,b-e_q_loss:0.0139,h-rec_loss:0.0395,h-velocity_loss:0.0213,h-e_q_loss:0.0119 +2022-10-31 21:28:56,008-215-global_steps:80400,b-rec_loss:0.0256,b-velocity_loss:0.0034,b-e_q_loss:0.0139,h-rec_loss:0.0391,h-velocity_loss:0.0213,h-e_q_loss:0.0118 +2022-10-31 21:31:14,039-215-global_steps:80800,b-rec_loss:0.0256,b-velocity_loss:0.0034,b-e_q_loss:0.0139,h-rec_loss:0.0389,h-velocity_loss:0.0213,h-e_q_loss:0.0118 +2022-10-31 
21:33:34,436-215-global_steps:81200,b-rec_loss:0.0255,b-velocity_loss:0.0034,b-e_q_loss:0.0139,h-rec_loss:0.0389,h-velocity_loss:0.0213,h-e_q_loss:0.0118 +2022-10-31 21:35:50,899-215-global_steps:81600,b-rec_loss:0.0255,b-velocity_loss:0.0033,b-e_q_loss:0.0139,h-rec_loss:0.0388,h-velocity_loss:0.0213,h-e_q_loss:0.0118 +2022-10-31 21:38:07,732-215-global_steps:82000,b-rec_loss:0.0255,b-velocity_loss:0.0033,b-e_q_loss:0.0139,h-rec_loss:0.0387,h-velocity_loss:0.0213,h-e_q_loss:0.0118 +2022-10-31 21:40:23,855-215-global_steps:82400,b-rec_loss:0.0255,b-velocity_loss:0.0033,b-e_q_loss:0.0139,h-rec_loss:0.0387,h-velocity_loss:0.0213,h-e_q_loss:0.0118 +2022-10-31 21:40:30,665-278-epoch:29 +2022-10-31 21:42:43,711-215-global_steps:82800,b-rec_loss:0.0254,b-velocity_loss:0.0033,b-e_q_loss:0.0138,h-rec_loss:0.0385,h-velocity_loss:0.0213,h-e_q_loss:0.0118 +2022-10-31 21:45:02,665-215-global_steps:83200,b-rec_loss:0.0254,b-velocity_loss:0.0033,b-e_q_loss:0.0138,h-rec_loss:0.0385,h-velocity_loss:0.0213,h-e_q_loss:0.0118 +2022-10-31 21:47:19,555-215-global_steps:83600,b-rec_loss:0.0254,b-velocity_loss:0.0033,b-e_q_loss:0.0138,h-rec_loss:0.0385,h-velocity_loss:0.0213,h-e_q_loss:0.0118 +2022-10-31 21:49:39,101-215-global_steps:84000,b-rec_loss:0.0253,b-velocity_loss:0.0033,b-e_q_loss:0.0138,h-rec_loss:0.0385,h-velocity_loss:0.0213,h-e_q_loss:0.0118 +2022-10-31 21:51:58,186-215-global_steps:84400,b-rec_loss:0.0255,b-velocity_loss:0.0034,b-e_q_loss:0.0138,h-rec_loss:0.0385,h-velocity_loss:0.0213,h-e_q_loss:0.0118 +2022-10-31 21:54:17,361-215-global_steps:84800,b-rec_loss:0.0257,b-velocity_loss:0.0034,b-e_q_loss:0.0138,h-rec_loss:0.0385,h-velocity_loss:0.0213,h-e_q_loss:0.0118 +2022-10-31 21:56:33,820-215-global_steps:85200,b-rec_loss:0.0256,b-velocity_loss:0.0034,b-e_q_loss:0.0138,h-rec_loss:0.0385,h-velocity_loss:0.0213,h-e_q_loss:0.0118 +2022-10-31 21:56:55,435-278-epoch:30 +2022-10-31 21:58:52,353-215-global_steps:85600,b-rec_loss:0.0252,b-velocity_loss:0.0033,b-e_q_loss:0.0137,h-rec_loss:0.0383,h-velocity_loss:0.0213,h-e_q_loss:0.0118 +2022-10-31 22:01:11,266-215-global_steps:86000,b-rec_loss:0.0252,b-velocity_loss:0.0033,b-e_q_loss:0.0138,h-rec_loss:0.0382,h-velocity_loss:0.0213,h-e_q_loss:0.0118 +2022-10-31 22:03:29,859-215-global_steps:86400,b-rec_loss:0.0252,b-velocity_loss:0.0033,b-e_q_loss:0.0137,h-rec_loss:0.0382,h-velocity_loss:0.0213,h-e_q_loss:0.0118 +2022-10-31 22:05:47,343-215-global_steps:86800,b-rec_loss:0.0252,b-velocity_loss:0.0033,b-e_q_loss:0.0137,h-rec_loss:0.0382,h-velocity_loss:0.0213,h-e_q_loss:0.0118 +2022-10-31 22:08:04,210-215-global_steps:87200,b-rec_loss:0.0252,b-velocity_loss:0.0033,b-e_q_loss:0.0137,h-rec_loss:0.0383,h-velocity_loss:0.0213,h-e_q_loss:0.0118 +2022-10-31 22:10:24,417-215-global_steps:87600,b-rec_loss:0.0252,b-velocity_loss:0.0033,b-e_q_loss:0.0137,h-rec_loss:0.0383,h-velocity_loss:0.0213,h-e_q_loss:0.0118 +2022-10-31 22:12:41,877-215-global_steps:88000,b-rec_loss:0.0251,b-velocity_loss:0.0033,b-e_q_loss:0.0137,h-rec_loss:0.0383,h-velocity_loss:0.0213,h-e_q_loss:0.0118 +2022-10-31 22:13:16,743-278-epoch:31 +2022-10-31 22:14:59,995-215-global_steps:88400,b-rec_loss:0.0249,b-velocity_loss:0.0033,b-e_q_loss:0.0136,h-rec_loss:0.0380,h-velocity_loss:0.0212,h-e_q_loss:0.0117 +2022-10-31 22:17:18,255-215-global_steps:88800,b-rec_loss:0.0250,b-velocity_loss:0.0033,b-e_q_loss:0.0136,h-rec_loss:0.0382,h-velocity_loss:0.0212,h-e_q_loss:0.0117 +2022-10-31 
22:19:38,515-215-global_steps:89200,b-rec_loss:0.0249,b-velocity_loss:0.0033,b-e_q_loss:0.0136,h-rec_loss:0.0381,h-velocity_loss:0.0212,h-e_q_loss:0.0117 +2022-10-31 22:21:57,446-215-global_steps:89600,b-rec_loss:0.0251,b-velocity_loss:0.0033,b-e_q_loss:0.0136,h-rec_loss:0.0381,h-velocity_loss:0.0212,h-e_q_loss:0.0117 +2022-10-31 22:24:16,606-215-global_steps:90000,b-rec_loss:0.0251,b-velocity_loss:0.0033,b-e_q_loss:0.0136,h-rec_loss:0.0381,h-velocity_loss:0.0212,h-e_q_loss:0.0117 +2022-10-31 22:26:35,518-215-global_steps:90400,b-rec_loss:0.0251,b-velocity_loss:0.0033,b-e_q_loss:0.0136,h-rec_loss:0.0381,h-velocity_loss:0.0213,h-e_q_loss:0.0117 +2022-10-31 22:28:55,953-215-global_steps:90800,b-rec_loss:0.0250,b-velocity_loss:0.0033,b-e_q_loss:0.0136,h-rec_loss:0.0381,h-velocity_loss:0.0213,h-e_q_loss:0.0117 +2022-10-31 22:29:47,327-278-epoch:32 +2022-10-31 22:31:16,894-215-global_steps:91200,b-rec_loss:0.0248,b-velocity_loss:0.0033,b-e_q_loss:0.0135,h-rec_loss:0.0379,h-velocity_loss:0.0213,h-e_q_loss:0.0117 +2022-10-31 22:33:35,141-215-global_steps:91600,b-rec_loss:0.0248,b-velocity_loss:0.0033,b-e_q_loss:0.0136,h-rec_loss:0.0380,h-velocity_loss:0.0213,h-e_q_loss:0.0117 +2022-10-31 22:35:53,882-215-global_steps:92000,b-rec_loss:0.0247,b-velocity_loss:0.0033,b-e_q_loss:0.0135,h-rec_loss:0.0380,h-velocity_loss:0.0213,h-e_q_loss:0.0117 +2022-10-31 22:38:11,669-215-global_steps:92400,b-rec_loss:0.0247,b-velocity_loss:0.0033,b-e_q_loss:0.0135,h-rec_loss:0.0379,h-velocity_loss:0.0212,h-e_q_loss:0.0116 +2022-10-31 22:40:27,976-215-global_steps:92800,b-rec_loss:0.0247,b-velocity_loss:0.0033,b-e_q_loss:0.0135,h-rec_loss:0.0379,h-velocity_loss:0.0212,h-e_q_loss:0.0117 +2022-10-31 22:42:45,962-215-global_steps:93200,b-rec_loss:0.0247,b-velocity_loss:0.0033,b-e_q_loss:0.0135,h-rec_loss:0.0379,h-velocity_loss:0.0212,h-e_q_loss:0.0116 +2022-10-31 22:45:03,195-215-global_steps:93600,b-rec_loss:0.0247,b-velocity_loss:0.0033,b-e_q_loss:0.0135,h-rec_loss:0.0379,h-velocity_loss:0.0212,h-e_q_loss:0.0116 +2022-10-31 22:46:07,414-278-epoch:33 +2022-10-31 22:47:22,206-215-global_steps:94000,b-rec_loss:0.0259,b-velocity_loss:0.0034,b-e_q_loss:0.0136,h-rec_loss:0.0383,h-velocity_loss:0.0213,h-e_q_loss:0.0117 +2022-10-31 22:49:39,208-215-global_steps:94400,b-rec_loss:0.0252,b-velocity_loss:0.0033,b-e_q_loss:0.0135,h-rec_loss:0.0380,h-velocity_loss:0.0213,h-e_q_loss:0.0116 +2022-10-31 22:51:55,646-215-global_steps:94800,b-rec_loss:0.0250,b-velocity_loss:0.0033,b-e_q_loss:0.0135,h-rec_loss:0.0379,h-velocity_loss:0.0212,h-e_q_loss:0.0116 +2022-10-31 22:54:15,356-215-global_steps:95200,b-rec_loss:0.0249,b-velocity_loss:0.0033,b-e_q_loss:0.0135,h-rec_loss:0.0378,h-velocity_loss:0.0212,h-e_q_loss:0.0116 +2022-10-31 22:56:33,803-215-global_steps:95600,b-rec_loss:0.0248,b-velocity_loss:0.0033,b-e_q_loss:0.0134,h-rec_loss:0.0378,h-velocity_loss:0.0212,h-e_q_loss:0.0116 +2022-10-31 22:58:51,328-215-global_steps:96000,b-rec_loss:0.0248,b-velocity_loss:0.0033,b-e_q_loss:0.0134,h-rec_loss:0.0378,h-velocity_loss:0.0212,h-e_q_loss:0.0116 +2022-10-31 23:01:08,966-215-global_steps:96400,b-rec_loss:0.0247,b-velocity_loss:0.0033,b-e_q_loss:0.0134,h-rec_loss:0.0378,h-velocity_loss:0.0212,h-e_q_loss:0.0116 +2022-10-31 23:02:28,429-278-epoch:34 +2022-10-31 23:03:30,556-215-global_steps:96800,b-rec_loss:0.0244,b-velocity_loss:0.0033,b-e_q_loss:0.0134,h-rec_loss:0.0379,h-velocity_loss:0.0213,h-e_q_loss:0.0117 +2022-10-31 
23:05:48,725-215-global_steps:97200,b-rec_loss:0.0244,b-velocity_loss:0.0033,b-e_q_loss:0.0133,h-rec_loss:0.0377,h-velocity_loss:0.0212,h-e_q_loss:0.0116 +2022-10-31 23:08:05,692-215-global_steps:97600,b-rec_loss:0.0244,b-velocity_loss:0.0033,b-e_q_loss:0.0133,h-rec_loss:0.0377,h-velocity_loss:0.0212,h-e_q_loss:0.0116 +2022-10-31 23:10:22,772-215-global_steps:98000,b-rec_loss:0.0244,b-velocity_loss:0.0033,b-e_q_loss:0.0133,h-rec_loss:0.0377,h-velocity_loss:0.0212,h-e_q_loss:0.0116 +2022-10-31 23:12:45,213-215-global_steps:98400,b-rec_loss:0.0244,b-velocity_loss:0.0033,b-e_q_loss:0.0133,h-rec_loss:0.0376,h-velocity_loss:0.0212,h-e_q_loss:0.0116 +2022-10-31 23:15:04,936-215-global_steps:98800,b-rec_loss:0.0244,b-velocity_loss:0.0033,b-e_q_loss:0.0133,h-rec_loss:0.0376,h-velocity_loss:0.0212,h-e_q_loss:0.0116 +2022-10-31 23:17:23,169-215-global_steps:99200,b-rec_loss:0.0244,b-velocity_loss:0.0033,b-e_q_loss:0.0133,h-rec_loss:0.0376,h-velocity_loss:0.0212,h-e_q_loss:0.0116 +2022-10-31 23:18:56,452-278-epoch:35 +2022-10-31 23:19:43,346-215-global_steps:99600,b-rec_loss:0.0252,b-velocity_loss:0.0034,b-e_q_loss:0.0134,h-rec_loss:0.0374,h-velocity_loss:0.0212,h-e_q_loss:0.0115 +2022-10-31 23:22:04,149-215-global_steps:100000,b-rec_loss:0.0246,b-velocity_loss:0.0033,b-e_q_loss:0.0133,h-rec_loss:0.0374,h-velocity_loss:0.0212,h-e_q_loss:0.0116 +2022-10-31 23:24:24,322-215-global_steps:100400,b-rec_loss:0.0244,b-velocity_loss:0.0033,b-e_q_loss:0.0133,h-rec_loss:0.0374,h-velocity_loss:0.0212,h-e_q_loss:0.0116 +2022-10-31 23:26:43,195-215-global_steps:100800,b-rec_loss:0.0244,b-velocity_loss:0.0033,b-e_q_loss:0.0133,h-rec_loss:0.0375,h-velocity_loss:0.0212,h-e_q_loss:0.0116 +2022-10-31 23:29:01,398-215-global_steps:101200,b-rec_loss:0.0244,b-velocity_loss:0.0033,b-e_q_loss:0.0132,h-rec_loss:0.0376,h-velocity_loss:0.0212,h-e_q_loss:0.0116 +2022-10-31 23:31:21,694-215-global_steps:101600,b-rec_loss:0.0244,b-velocity_loss:0.0033,b-e_q_loss:0.0132,h-rec_loss:0.0376,h-velocity_loss:0.0212,h-e_q_loss:0.0116 +2022-10-31 23:33:41,983-215-global_steps:102000,b-rec_loss:0.0243,b-velocity_loss:0.0033,b-e_q_loss:0.0132,h-rec_loss:0.0376,h-velocity_loss:0.0212,h-e_q_loss:0.0116 +2022-10-31 23:35:28,513-278-epoch:36 +2022-10-31 23:35:59,562-215-global_steps:102400,b-rec_loss:0.0240,b-velocity_loss:0.0033,b-e_q_loss:0.0131,h-rec_loss:0.0374,h-velocity_loss:0.0212,h-e_q_loss:0.0115 +2022-10-31 23:38:19,383-215-global_steps:102800,b-rec_loss:0.0240,b-velocity_loss:0.0033,b-e_q_loss:0.0131,h-rec_loss:0.0372,h-velocity_loss:0.0211,h-e_q_loss:0.0115 +2022-10-31 23:40:38,975-215-global_steps:103200,b-rec_loss:0.0240,b-velocity_loss:0.0033,b-e_q_loss:0.0131,h-rec_loss:0.0373,h-velocity_loss:0.0212,h-e_q_loss:0.0115 +2022-10-31 23:42:57,516-215-global_steps:103600,b-rec_loss:0.0240,b-velocity_loss:0.0033,b-e_q_loss:0.0131,h-rec_loss:0.0373,h-velocity_loss:0.0212,h-e_q_loss:0.0115 +2022-10-31 23:45:13,975-215-global_steps:104000,b-rec_loss:0.0241,b-velocity_loss:0.0033,b-e_q_loss:0.0131,h-rec_loss:0.0373,h-velocity_loss:0.0212,h-e_q_loss:0.0115 +2022-10-31 23:47:34,435-215-global_steps:104400,b-rec_loss:0.0241,b-velocity_loss:0.0033,b-e_q_loss:0.0131,h-rec_loss:0.0373,h-velocity_loss:0.0212,h-e_q_loss:0.0115 +2022-10-31 23:49:53,665-215-global_steps:104800,b-rec_loss:0.0241,b-velocity_loss:0.0033,b-e_q_loss:0.0131,h-rec_loss:0.0373,h-velocity_loss:0.0212,h-e_q_loss:0.0115 +2022-10-31 23:51:56,036-278-epoch:37 +2022-10-31 
23:52:13,429-215-global_steps:105200,b-rec_loss:0.0240,b-velocity_loss:0.0033,b-e_q_loss:0.0130,h-rec_loss:0.0373,h-velocity_loss:0.0212,h-e_q_loss:0.0115 +2022-10-31 23:54:30,823-215-global_steps:105600,b-rec_loss:0.0240,b-velocity_loss:0.0033,b-e_q_loss:0.0130,h-rec_loss:0.0371,h-velocity_loss:0.0211,h-e_q_loss:0.0115 +2022-10-31 23:56:49,522-215-global_steps:106000,b-rec_loss:0.0240,b-velocity_loss:0.0033,b-e_q_loss:0.0130,h-rec_loss:0.0371,h-velocity_loss:0.0211,h-e_q_loss:0.0115 +2022-10-31 23:59:09,542-215-global_steps:106400,b-rec_loss:0.0240,b-velocity_loss:0.0033,b-e_q_loss:0.0130,h-rec_loss:0.0376,h-velocity_loss:0.0212,h-e_q_loss:0.0116 +2022-11-01 00:01:29,637-215-global_steps:106800,b-rec_loss:0.0239,b-velocity_loss:0.0033,b-e_q_loss:0.0130,h-rec_loss:0.0376,h-velocity_loss:0.0212,h-e_q_loss:0.0116 +2022-11-01 00:03:49,115-215-global_steps:107200,b-rec_loss:0.0239,b-velocity_loss:0.0033,b-e_q_loss:0.0130,h-rec_loss:0.0376,h-velocity_loss:0.0212,h-e_q_loss:0.0116 +2022-11-01 00:06:09,818-215-global_steps:107600,b-rec_loss:0.0239,b-velocity_loss:0.0033,b-e_q_loss:0.0130,h-rec_loss:0.0375,h-velocity_loss:0.0212,h-e_q_loss:0.0116 +2022-11-01 00:08:29,382-278-epoch:38 +2022-11-01 00:08:32,428-215-global_steps:108000,b-rec_loss:0.0232,b-velocity_loss:0.0033,b-e_q_loss:0.0131,h-rec_loss:0.0371,h-velocity_loss:0.0212,h-e_q_loss:0.0114 +2022-11-01 00:10:50,008-215-global_steps:108400,b-rec_loss:0.0237,b-velocity_loss:0.0033,b-e_q_loss:0.0129,h-rec_loss:0.0370,h-velocity_loss:0.0211,h-e_q_loss:0.0115 +2022-11-01 00:13:07,855-215-global_steps:108800,b-rec_loss:0.0237,b-velocity_loss:0.0033,b-e_q_loss:0.0130,h-rec_loss:0.0370,h-velocity_loss:0.0211,h-e_q_loss:0.0115 +2022-11-01 00:15:26,061-215-global_steps:109200,b-rec_loss:0.0238,b-velocity_loss:0.0033,b-e_q_loss:0.0130,h-rec_loss:0.0370,h-velocity_loss:0.0211,h-e_q_loss:0.0115 +2022-11-01 00:17:45,933-215-global_steps:109600,b-rec_loss:0.0238,b-velocity_loss:0.0033,b-e_q_loss:0.0130,h-rec_loss:0.0370,h-velocity_loss:0.0211,h-e_q_loss:0.0115 +2022-11-01 00:20:04,358-215-global_steps:110000,b-rec_loss:0.0237,b-velocity_loss:0.0033,b-e_q_loss:0.0129,h-rec_loss:0.0369,h-velocity_loss:0.0211,h-e_q_loss:0.0115 +2022-11-01 00:22:24,199-215-global_steps:110400,b-rec_loss:0.0237,b-velocity_loss:0.0033,b-e_q_loss:0.0129,h-rec_loss:0.0369,h-velocity_loss:0.0211,h-e_q_loss:0.0115 +2022-11-01 00:24:43,766-215-global_steps:110800,b-rec_loss:0.0237,b-velocity_loss:0.0033,b-e_q_loss:0.0129,h-rec_loss:0.0370,h-velocity_loss:0.0211,h-e_q_loss:0.0115 +2022-11-01 00:24:57,143-278-epoch:39 +2022-11-01 00:27:04,500-215-global_steps:111200,b-rec_loss:0.0236,b-velocity_loss:0.0033,b-e_q_loss:0.0129,h-rec_loss:0.0371,h-velocity_loss:0.0212,h-e_q_loss:0.0115 +2022-11-01 00:29:23,863-215-global_steps:111600,b-rec_loss:0.0236,b-velocity_loss:0.0033,b-e_q_loss:0.0129,h-rec_loss:0.0370,h-velocity_loss:0.0211,h-e_q_loss:0.0115 +2022-11-01 00:31:42,353-215-global_steps:112000,b-rec_loss:0.0236,b-velocity_loss:0.0033,b-e_q_loss:0.0129,h-rec_loss:0.0370,h-velocity_loss:0.0211,h-e_q_loss:0.0115 +2022-11-01 00:34:01,438-215-global_steps:112400,b-rec_loss:0.0236,b-velocity_loss:0.0033,b-e_q_loss:0.0129,h-rec_loss:0.0369,h-velocity_loss:0.0211,h-e_q_loss:0.0114 +2022-11-01 00:36:19,306-215-global_steps:112800,b-rec_loss:0.0236,b-velocity_loss:0.0033,b-e_q_loss:0.0129,h-rec_loss:0.0369,h-velocity_loss:0.0211,h-e_q_loss:0.0114 +2022-11-01 
00:38:37,507-215-global_steps:113200,b-rec_loss:0.0236,b-velocity_loss:0.0033,b-e_q_loss:0.0129,h-rec_loss:0.0369,h-velocity_loss:0.0211,h-e_q_loss:0.0114 +2022-11-01 00:40:56,085-215-global_steps:113600,b-rec_loss:0.0236,b-velocity_loss:0.0033,b-e_q_loss:0.0129,h-rec_loss:0.0369,h-velocity_loss:0.0211,h-e_q_loss:0.0114 +2022-11-01 00:41:26,805-278-epoch:40 +2022-11-01 00:43:20,724-215-global_steps:114000,b-rec_loss:0.0235,b-velocity_loss:0.0033,b-e_q_loss:0.0128,h-rec_loss:0.0368,h-velocity_loss:0.0211,h-e_q_loss:0.0114 +2022-11-01 00:45:38,020-215-global_steps:114400,b-rec_loss:0.0234,b-velocity_loss:0.0032,b-e_q_loss:0.0128,h-rec_loss:0.0368,h-velocity_loss:0.0211,h-e_q_loss:0.0114 +2022-11-01 00:47:54,788-215-global_steps:114800,b-rec_loss:0.0234,b-velocity_loss:0.0032,b-e_q_loss:0.0128,h-rec_loss:0.0368,h-velocity_loss:0.0211,h-e_q_loss:0.0114 +2022-11-01 00:50:13,166-215-global_steps:115200,b-rec_loss:0.0234,b-velocity_loss:0.0032,b-e_q_loss:0.0128,h-rec_loss:0.0368,h-velocity_loss:0.0211,h-e_q_loss:0.0114 +2022-11-01 00:52:33,189-215-global_steps:115600,b-rec_loss:0.0235,b-velocity_loss:0.0033,b-e_q_loss:0.0128,h-rec_loss:0.0368,h-velocity_loss:0.0211,h-e_q_loss:0.0114 +2022-11-01 00:54:50,834-215-global_steps:116000,b-rec_loss:0.0238,b-velocity_loss:0.0033,b-e_q_loss:0.0128,h-rec_loss:0.0368,h-velocity_loss:0.0211,h-e_q_loss:0.0114 +2022-11-01 00:57:09,405-215-global_steps:116400,b-rec_loss:0.0239,b-velocity_loss:0.0033,b-e_q_loss:0.0128,h-rec_loss:0.0368,h-velocity_loss:0.0211,h-e_q_loss:0.0114 +2022-11-01 00:57:52,666-278-epoch:41 +2022-11-01 00:59:31,926-215-global_steps:116800,b-rec_loss:0.0245,b-velocity_loss:0.0034,b-e_q_loss:0.0130,h-rec_loss:0.0367,h-velocity_loss:0.0211,h-e_q_loss:0.0114 +2022-11-01 01:01:52,666-215-global_steps:117200,b-rec_loss:0.0241,b-velocity_loss:0.0033,b-e_q_loss:0.0129,h-rec_loss:0.0367,h-velocity_loss:0.0211,h-e_q_loss:0.0114 +2022-11-01 01:04:11,242-215-global_steps:117600,b-rec_loss:0.0239,b-velocity_loss:0.0033,b-e_q_loss:0.0128,h-rec_loss:0.0366,h-velocity_loss:0.0211,h-e_q_loss:0.0114 +2022-11-01 01:06:29,677-215-global_steps:118000,b-rec_loss:0.0238,b-velocity_loss:0.0033,b-e_q_loss:0.0128,h-rec_loss:0.0366,h-velocity_loss:0.0211,h-e_q_loss:0.0114 +2022-11-01 01:08:49,489-215-global_steps:118400,b-rec_loss:0.0237,b-velocity_loss:0.0033,b-e_q_loss:0.0128,h-rec_loss:0.0366,h-velocity_loss:0.0211,h-e_q_loss:0.0114 +2022-11-01 01:11:08,220-215-global_steps:118800,b-rec_loss:0.0237,b-velocity_loss:0.0033,b-e_q_loss:0.0128,h-rec_loss:0.0367,h-velocity_loss:0.0211,h-e_q_loss:0.0114 +2022-11-01 01:13:27,367-215-global_steps:119200,b-rec_loss:0.0237,b-velocity_loss:0.0033,b-e_q_loss:0.0128,h-rec_loss:0.0367,h-velocity_loss:0.0211,h-e_q_loss:0.0114 +2022-11-01 01:14:23,677-278-epoch:42 +2022-11-01 01:15:47,184-215-global_steps:119600,b-rec_loss:0.0234,b-velocity_loss:0.0033,b-e_q_loss:0.0127,h-rec_loss:0.0367,h-velocity_loss:0.0211,h-e_q_loss:0.0114 +2022-11-01 01:18:07,895-215-global_steps:120000,b-rec_loss:0.0234,b-velocity_loss:0.0033,b-e_q_loss:0.0127,h-rec_loss:0.0366,h-velocity_loss:0.0211,h-e_q_loss:0.0114 +2022-11-01 01:20:26,646-215-global_steps:120400,b-rec_loss:0.0233,b-velocity_loss:0.0033,b-e_q_loss:0.0127,h-rec_loss:0.0366,h-velocity_loss:0.0211,h-e_q_loss:0.0114 +2022-11-01 01:22:43,844-215-global_steps:120800,b-rec_loss:0.0233,b-velocity_loss:0.0033,b-e_q_loss:0.0127,h-rec_loss:0.0366,h-velocity_loss:0.0211,h-e_q_loss:0.0114 +2022-11-01 
01:25:01,107-215-global_steps:121200,b-rec_loss:0.0234,b-velocity_loss:0.0033,b-e_q_loss:0.0127,h-rec_loss:0.0366,h-velocity_loss:0.0211,h-e_q_loss:0.0114 +2022-11-01 01:27:22,017-215-global_steps:121600,b-rec_loss:0.0234,b-velocity_loss:0.0033,b-e_q_loss:0.0127,h-rec_loss:0.0366,h-velocity_loss:0.0211,h-e_q_loss:0.0114 +2022-11-01 01:29:40,903-215-global_steps:122000,b-rec_loss:0.0234,b-velocity_loss:0.0033,b-e_q_loss:0.0127,h-rec_loss:0.0366,h-velocity_loss:0.0211,h-e_q_loss:0.0114 +2022-11-01 01:30:52,302-278-epoch:43 +2022-11-01 01:32:00,088-215-global_steps:122400,b-rec_loss:0.0232,b-velocity_loss:0.0033,b-e_q_loss:0.0126,h-rec_loss:0.0365,h-velocity_loss:0.0211,h-e_q_loss:0.0113 +2022-11-01 01:34:18,729-215-global_steps:122800,b-rec_loss:0.0233,b-velocity_loss:0.0033,b-e_q_loss:0.0126,h-rec_loss:0.0364,h-velocity_loss:0.0211,h-e_q_loss:0.0113 +2022-11-01 01:36:38,506-215-global_steps:123200,b-rec_loss:0.0233,b-velocity_loss:0.0033,b-e_q_loss:0.0126,h-rec_loss:0.0364,h-velocity_loss:0.0211,h-e_q_loss:0.0113 +2022-11-01 01:38:56,521-215-global_steps:123600,b-rec_loss:0.0233,b-velocity_loss:0.0033,b-e_q_loss:0.0126,h-rec_loss:0.0364,h-velocity_loss:0.0211,h-e_q_loss:0.0113 +2022-11-01 01:41:13,199-215-global_steps:124000,b-rec_loss:0.0233,b-velocity_loss:0.0033,b-e_q_loss:0.0126,h-rec_loss:0.0365,h-velocity_loss:0.0211,h-e_q_loss:0.0114 +2022-11-01 01:43:31,949-215-global_steps:124400,b-rec_loss:0.0239,b-velocity_loss:0.0033,b-e_q_loss:0.0127,h-rec_loss:0.0365,h-velocity_loss:0.0211,h-e_q_loss:0.0114 +2022-11-01 01:45:52,975-215-global_steps:124800,b-rec_loss:0.0243,b-velocity_loss:0.0034,b-e_q_loss:0.0127,h-rec_loss:0.0365,h-velocity_loss:0.0211,h-e_q_loss:0.0114 +2022-11-01 01:47:19,905-278-epoch:44 +2022-11-01 01:48:12,971-215-global_steps:125200,b-rec_loss:0.0245,b-velocity_loss:0.0033,b-e_q_loss:0.0127,h-rec_loss:0.0362,h-velocity_loss:0.0210,h-e_q_loss:0.0113 +2022-11-01 01:50:30,352-215-global_steps:125600,b-rec_loss:0.0243,b-velocity_loss:0.0033,b-e_q_loss:0.0127,h-rec_loss:0.0365,h-velocity_loss:0.0211,h-e_q_loss:0.0114 +2022-11-01 01:52:49,358-215-global_steps:126000,b-rec_loss:0.0241,b-velocity_loss:0.0033,b-e_q_loss:0.0127,h-rec_loss:0.0365,h-velocity_loss:0.0211,h-e_q_loss:0.0114 +2022-11-01 01:55:07,901-215-global_steps:126400,b-rec_loss:0.0240,b-velocity_loss:0.0033,b-e_q_loss:0.0127,h-rec_loss:0.0365,h-velocity_loss:0.0211,h-e_q_loss:0.0114 +2022-11-01 01:57:25,092-215-global_steps:126800,b-rec_loss:0.0239,b-velocity_loss:0.0033,b-e_q_loss:0.0127,h-rec_loss:0.0364,h-velocity_loss:0.0211,h-e_q_loss:0.0113 +2022-11-01 01:59:42,291-215-global_steps:127200,b-rec_loss:0.0238,b-velocity_loss:0.0033,b-e_q_loss:0.0127,h-rec_loss:0.0364,h-velocity_loss:0.0211,h-e_q_loss:0.0113 +2022-11-01 02:02:02,277-215-global_steps:127600,b-rec_loss:0.0238,b-velocity_loss:0.0033,b-e_q_loss:0.0127,h-rec_loss:0.0364,h-velocity_loss:0.0211,h-e_q_loss:0.0113 +2022-11-01 02:03:42,598-278-epoch:45 +2022-11-01 02:04:21,575-215-global_steps:128000,b-rec_loss:0.0234,b-velocity_loss:0.0033,b-e_q_loss:0.0125,h-rec_loss:0.0362,h-velocity_loss:0.0211,h-e_q_loss:0.0112 +2022-11-01 02:06:40,212-215-global_steps:128400,b-rec_loss:0.0233,b-velocity_loss:0.0033,b-e_q_loss:0.0126,h-rec_loss:0.0362,h-velocity_loss:0.0210,h-e_q_loss:0.0113 +2022-11-01 02:08:56,953-215-global_steps:128800,b-rec_loss:0.0232,b-velocity_loss:0.0033,b-e_q_loss:0.0125,h-rec_loss:0.0362,h-velocity_loss:0.0210,h-e_q_loss:0.0113 +2022-11-01 
02:11:15,992-215-global_steps:129200,b-rec_loss:0.0232,b-velocity_loss:0.0033,b-e_q_loss:0.0125,h-rec_loss:0.0362,h-velocity_loss:0.0210,h-e_q_loss:0.0113 +2022-11-01 02:13:35,464-215-global_steps:129600,b-rec_loss:0.0233,b-velocity_loss:0.0033,b-e_q_loss:0.0126,h-rec_loss:0.0362,h-velocity_loss:0.0211,h-e_q_loss:0.0113 +2022-11-01 02:15:51,991-215-global_steps:130000,b-rec_loss:0.0234,b-velocity_loss:0.0033,b-e_q_loss:0.0126,h-rec_loss:0.0362,h-velocity_loss:0.0210,h-e_q_loss:0.0113 +2022-11-01 02:18:08,426-215-global_steps:130400,b-rec_loss:0.0234,b-velocity_loss:0.0033,b-e_q_loss:0.0125,h-rec_loss:0.0362,h-velocity_loss:0.0210,h-e_q_loss:0.0113 +2022-11-01 02:20:04,001-278-epoch:46 +2022-11-01 02:20:29,278-215-global_steps:130800,b-rec_loss:0.0231,b-velocity_loss:0.0032,b-e_q_loss:0.0124,h-rec_loss:0.0362,h-velocity_loss:0.0212,h-e_q_loss:0.0112 +2022-11-01 02:22:47,310-215-global_steps:131200,b-rec_loss:0.0230,b-velocity_loss:0.0032,b-e_q_loss:0.0124,h-rec_loss:0.0361,h-velocity_loss:0.0211,h-e_q_loss:0.0112 +2022-11-01 02:25:03,302-215-global_steps:131600,b-rec_loss:0.0231,b-velocity_loss:0.0032,b-e_q_loss:0.0124,h-rec_loss:0.0362,h-velocity_loss:0.0211,h-e_q_loss:0.0113 +2022-11-01 02:27:23,026-215-global_steps:132000,b-rec_loss:0.0231,b-velocity_loss:0.0032,b-e_q_loss:0.0124,h-rec_loss:0.0362,h-velocity_loss:0.0211,h-e_q_loss:0.0113 +2022-11-01 02:29:43,359-215-global_steps:132400,b-rec_loss:0.0230,b-velocity_loss:0.0032,b-e_q_loss:0.0124,h-rec_loss:0.0362,h-velocity_loss:0.0210,h-e_q_loss:0.0113 +2022-11-01 02:32:01,225-215-global_steps:132800,b-rec_loss:0.0231,b-velocity_loss:0.0032,b-e_q_loss:0.0124,h-rec_loss:0.0362,h-velocity_loss:0.0210,h-e_q_loss:0.0113 +2022-11-01 02:34:17,853-215-global_steps:133200,b-rec_loss:0.0230,b-velocity_loss:0.0032,b-e_q_loss:0.0124,h-rec_loss:0.0361,h-velocity_loss:0.0210,h-e_q_loss:0.0113 +2022-11-01 02:36:25,550-278-epoch:47 +2022-11-01 02:36:35,825-215-global_steps:133600,b-rec_loss:0.0228,b-velocity_loss:0.0032,b-e_q_loss:0.0122,h-rec_loss:0.0365,h-velocity_loss:0.0213,h-e_q_loss:0.0112 +2022-11-01 02:38:54,221-215-global_steps:134000,b-rec_loss:0.0231,b-velocity_loss:0.0033,b-e_q_loss:0.0123,h-rec_loss:0.0361,h-velocity_loss:0.0211,h-e_q_loss:0.0112 +2022-11-01 02:41:13,232-215-global_steps:134400,b-rec_loss:0.0233,b-velocity_loss:0.0033,b-e_q_loss:0.0124,h-rec_loss:0.0360,h-velocity_loss:0.0210,h-e_q_loss:0.0113 +2022-11-01 02:43:30,472-215-global_steps:134800,b-rec_loss:0.0234,b-velocity_loss:0.0033,b-e_q_loss:0.0124,h-rec_loss:0.0360,h-velocity_loss:0.0210,h-e_q_loss:0.0113 +2022-11-01 02:45:48,070-215-global_steps:135200,b-rec_loss:0.0233,b-velocity_loss:0.0033,b-e_q_loss:0.0124,h-rec_loss:0.0360,h-velocity_loss:0.0210,h-e_q_loss:0.0113 +2022-11-01 02:48:06,938-215-global_steps:135600,b-rec_loss:0.0232,b-velocity_loss:0.0032,b-e_q_loss:0.0124,h-rec_loss:0.0360,h-velocity_loss:0.0210,h-e_q_loss:0.0113 +2022-11-01 02:50:24,937-215-global_steps:136000,b-rec_loss:0.0231,b-velocity_loss:0.0032,b-e_q_loss:0.0124,h-rec_loss:0.0360,h-velocity_loss:0.0210,h-e_q_loss:0.0113 +2022-11-01 02:52:42,254-215-global_steps:136400,b-rec_loss:0.0231,b-velocity_loss:0.0032,b-e_q_loss:0.0123,h-rec_loss:0.0360,h-velocity_loss:0.0210,h-e_q_loss:0.0113 +2022-11-01 02:52:47,967-278-epoch:48 +2022-11-01 02:55:00,856-215-global_steps:136800,b-rec_loss:0.0226,b-velocity_loss:0.0032,b-e_q_loss:0.0123,h-rec_loss:0.0359,h-velocity_loss:0.0210,h-e_q_loss:0.0112 +2022-11-01 
02:57:19,303-215-global_steps:137200,b-rec_loss:0.0226,b-velocity_loss:0.0032,b-e_q_loss:0.0122,h-rec_loss:0.0359,h-velocity_loss:0.0210,h-e_q_loss:0.0112 +2022-11-01 02:59:36,112-215-global_steps:137600,b-rec_loss:0.0227,b-velocity_loss:0.0032,b-e_q_loss:0.0122,h-rec_loss:0.0359,h-velocity_loss:0.0210,h-e_q_loss:0.0112 +2022-11-01 03:01:52,162-215-global_steps:138000,b-rec_loss:0.0227,b-velocity_loss:0.0032,b-e_q_loss:0.0122,h-rec_loss:0.0360,h-velocity_loss:0.0210,h-e_q_loss:0.0112 +2022-11-01 03:04:12,174-215-global_steps:138400,b-rec_loss:0.0227,b-velocity_loss:0.0032,b-e_q_loss:0.0122,h-rec_loss:0.0360,h-velocity_loss:0.0210,h-e_q_loss:0.0112 +2022-11-01 03:06:30,439-215-global_steps:138800,b-rec_loss:0.0227,b-velocity_loss:0.0032,b-e_q_loss:0.0122,h-rec_loss:0.0359,h-velocity_loss:0.0210,h-e_q_loss:0.0112 +2022-11-01 03:08:47,834-215-global_steps:139200,b-rec_loss:0.0228,b-velocity_loss:0.0032,b-e_q_loss:0.0122,h-rec_loss:0.0360,h-velocity_loss:0.0210,h-e_q_loss:0.0112 +2022-11-01 03:09:07,637-278-epoch:49 +2022-11-01 03:11:06,058-215-global_steps:139600,b-rec_loss:0.0225,b-velocity_loss:0.0032,b-e_q_loss:0.0121,h-rec_loss:0.0359,h-velocity_loss:0.0210,h-e_q_loss:0.0112 +2022-11-01 03:13:23,365-215-global_steps:140000,b-rec_loss:0.0226,b-velocity_loss:0.0032,b-e_q_loss:0.0122,h-rec_loss:0.0358,h-velocity_loss:0.0210,h-e_q_loss:0.0112 +2022-11-01 03:15:42,289-215-global_steps:140400,b-rec_loss:0.0226,b-velocity_loss:0.0032,b-e_q_loss:0.0122,h-rec_loss:0.0358,h-velocity_loss:0.0210,h-e_q_loss:0.0112 +2022-11-01 03:17:58,630-215-global_steps:140800,b-rec_loss:0.0226,b-velocity_loss:0.0032,b-e_q_loss:0.0122,h-rec_loss:0.0359,h-velocity_loss:0.0210,h-e_q_loss:0.0112 +2022-11-01 03:20:14,735-215-global_steps:141200,b-rec_loss:0.0226,b-velocity_loss:0.0032,b-e_q_loss:0.0122,h-rec_loss:0.0359,h-velocity_loss:0.0210,h-e_q_loss:0.0112 +2022-11-01 03:22:34,762-215-global_steps:141600,b-rec_loss:0.0226,b-velocity_loss:0.0032,b-e_q_loss:0.0122,h-rec_loss:0.0359,h-velocity_loss:0.0210,h-e_q_loss:0.0112 +2022-11-01 03:24:52,551-215-global_steps:142000,b-rec_loss:0.0227,b-velocity_loss:0.0032,b-e_q_loss:0.0122,h-rec_loss:0.0359,h-velocity_loss:0.0210,h-e_q_loss:0.0112 +2022-11-01 03:25:27,938-278-epoch:50 +2022-11-01 03:27:10,876-215-global_steps:142400,b-rec_loss:0.0225,b-velocity_loss:0.0032,b-e_q_loss:0.0122,h-rec_loss:0.0358,h-velocity_loss:0.0210,h-e_q_loss:0.0112 +2022-11-01 03:29:28,294-215-global_steps:142800,b-rec_loss:0.0225,b-velocity_loss:0.0032,b-e_q_loss:0.0121,h-rec_loss:0.0357,h-velocity_loss:0.0210,h-e_q_loss:0.0112 +2022-11-01 03:31:45,994-215-global_steps:143200,b-rec_loss:0.0232,b-velocity_loss:0.0033,b-e_q_loss:0.0122,h-rec_loss:0.0357,h-velocity_loss:0.0210,h-e_q_loss:0.0112 +2022-11-01 03:34:05,106-215-global_steps:143600,b-rec_loss:0.0231,b-velocity_loss:0.0033,b-e_q_loss:0.0122,h-rec_loss:0.0357,h-velocity_loss:0.0210,h-e_q_loss:0.0112 +2022-11-01 03:36:23,021-215-global_steps:144000,b-rec_loss:0.0231,b-velocity_loss:0.0032,b-e_q_loss:0.0122,h-rec_loss:0.0357,h-velocity_loss:0.0210,h-e_q_loss:0.0112 +2022-11-01 03:38:39,710-215-global_steps:144400,b-rec_loss:0.0230,b-velocity_loss:0.0032,b-e_q_loss:0.0122,h-rec_loss:0.0357,h-velocity_loss:0.0210,h-e_q_loss:0.0112 +2022-11-01 03:40:58,432-215-global_steps:144800,b-rec_loss:0.0229,b-velocity_loss:0.0032,b-e_q_loss:0.0122,h-rec_loss:0.0357,h-velocity_loss:0.0210,h-e_q_loss:0.0112 +2022-11-01 03:41:48,300-278-epoch:51 +2022-11-01 
03:43:18,034-215-global_steps:145200,b-rec_loss:0.0224,b-velocity_loss:0.0032,b-e_q_loss:0.0121,h-rec_loss:0.0356,h-velocity_loss:0.0210,h-e_q_loss:0.0112 +2022-11-01 03:45:34,543-215-global_steps:145600,b-rec_loss:0.0225,b-velocity_loss:0.0032,b-e_q_loss:0.0121,h-rec_loss:0.0357,h-velocity_loss:0.0210,h-e_q_loss:0.0112 +2022-11-01 03:47:51,362-215-global_steps:146000,b-rec_loss:0.0224,b-velocity_loss:0.0032,b-e_q_loss:0.0121,h-rec_loss:0.0356,h-velocity_loss:0.0210,h-e_q_loss:0.0112 +2022-11-01 03:50:09,906-215-global_steps:146400,b-rec_loss:0.0224,b-velocity_loss:0.0032,b-e_q_loss:0.0121,h-rec_loss:0.0356,h-velocity_loss:0.0210,h-e_q_loss:0.0112 +2022-11-01 03:52:27,842-215-global_steps:146800,b-rec_loss:0.0224,b-velocity_loss:0.0032,b-e_q_loss:0.0121,h-rec_loss:0.0356,h-velocity_loss:0.0210,h-e_q_loss:0.0112 +2022-11-01 03:54:44,725-215-global_steps:147200,b-rec_loss:0.0224,b-velocity_loss:0.0032,b-e_q_loss:0.0120,h-rec_loss:0.0356,h-velocity_loss:0.0210,h-e_q_loss:0.0112 +2022-11-01 03:57:01,561-215-global_steps:147600,b-rec_loss:0.0224,b-velocity_loss:0.0032,b-e_q_loss:0.0120,h-rec_loss:0.0356,h-velocity_loss:0.0210,h-e_q_loss:0.0112 +2022-11-01 03:58:05,114-278-epoch:52 +2022-11-01 03:59:21,298-215-global_steps:148000,b-rec_loss:0.0223,b-velocity_loss:0.0032,b-e_q_loss:0.0120,h-rec_loss:0.0356,h-velocity_loss:0.0210,h-e_q_loss:0.0111 +2022-11-01 04:01:41,355-215-global_steps:148400,b-rec_loss:0.0222,b-velocity_loss:0.0032,b-e_q_loss:0.0120,h-rec_loss:0.0356,h-velocity_loss:0.0210,h-e_q_loss:0.0111 +2022-11-01 04:03:57,031-215-global_steps:148800,b-rec_loss:0.0222,b-velocity_loss:0.0032,b-e_q_loss:0.0120,h-rec_loss:0.0355,h-velocity_loss:0.0209,h-e_q_loss:0.0111 +2022-11-01 04:06:15,572-215-global_steps:149200,b-rec_loss:0.0222,b-velocity_loss:0.0032,b-e_q_loss:0.0120,h-rec_loss:0.0356,h-velocity_loss:0.0210,h-e_q_loss:0.0112 +2022-11-01 04:08:34,476-215-global_steps:149600,b-rec_loss:0.0222,b-velocity_loss:0.0032,b-e_q_loss:0.0120,h-rec_loss:0.0356,h-velocity_loss:0.0210,h-e_q_loss:0.0111 +2022-11-01 04:10:53,296-215-global_steps:150000,b-rec_loss:0.0224,b-velocity_loss:0.0032,b-e_q_loss:0.0120,h-rec_loss:0.0356,h-velocity_loss:0.0210,h-e_q_loss:0.0111 +2022-11-01 04:13:09,901-215-global_steps:150400,b-rec_loss:0.0224,b-velocity_loss:0.0032,b-e_q_loss:0.0120,h-rec_loss:0.0356,h-velocity_loss:0.0210,h-e_q_loss:0.0111 +2022-11-01 04:14:27,763-278-epoch:53 +2022-11-01 04:15:28,998-215-global_steps:150800,b-rec_loss:0.0221,b-velocity_loss:0.0032,b-e_q_loss:0.0119,h-rec_loss:0.0355,h-velocity_loss:0.0210,h-e_q_loss:0.0111 +2022-11-01 04:17:47,750-215-global_steps:151200,b-rec_loss:0.0221,b-velocity_loss:0.0032,b-e_q_loss:0.0119,h-rec_loss:0.0354,h-velocity_loss:0.0210,h-e_q_loss:0.0111 +2022-11-01 04:20:04,616-215-global_steps:151600,b-rec_loss:0.0221,b-velocity_loss:0.0032,b-e_q_loss:0.0119,h-rec_loss:0.0355,h-velocity_loss:0.0210,h-e_q_loss:0.0111 +2022-11-01 04:22:19,889-215-global_steps:152000,b-rec_loss:0.0221,b-velocity_loss:0.0032,b-e_q_loss:0.0119,h-rec_loss:0.0355,h-velocity_loss:0.0210,h-e_q_loss:0.0111 +2022-11-01 04:24:37,147-215-global_steps:152400,b-rec_loss:0.0221,b-velocity_loss:0.0032,b-e_q_loss:0.0119,h-rec_loss:0.0355,h-velocity_loss:0.0210,h-e_q_loss:0.0111 +2022-11-01 04:26:56,116-215-global_steps:152800,b-rec_loss:0.0221,b-velocity_loss:0.0032,b-e_q_loss:0.0119,h-rec_loss:0.0355,h-velocity_loss:0.0210,h-e_q_loss:0.0111 +2022-11-01 
04:29:12,247-215-global_steps:153200,b-rec_loss:0.0221,b-velocity_loss:0.0032,b-e_q_loss:0.0119,h-rec_loss:0.0355,h-velocity_loss:0.0210,h-e_q_loss:0.0111 +2022-11-01 04:30:43,656-278-epoch:54 +2022-11-01 04:31:29,684-215-global_steps:153600,b-rec_loss:0.0222,b-velocity_loss:0.0032,b-e_q_loss:0.0119,h-rec_loss:0.0353,h-velocity_loss:0.0209,h-e_q_loss:0.0111 +2022-11-01 04:33:46,675-215-global_steps:154000,b-rec_loss:0.0221,b-velocity_loss:0.0032,b-e_q_loss:0.0119,h-rec_loss:0.0354,h-velocity_loss:0.0210,h-e_q_loss:0.0111 +2022-11-01 04:36:03,857-215-global_steps:154400,b-rec_loss:0.0221,b-velocity_loss:0.0032,b-e_q_loss:0.0119,h-rec_loss:0.0354,h-velocity_loss:0.0210,h-e_q_loss:0.0111 +2022-11-01 04:38:20,579-215-global_steps:154800,b-rec_loss:0.0221,b-velocity_loss:0.0032,b-e_q_loss:0.0119,h-rec_loss:0.0354,h-velocity_loss:0.0209,h-e_q_loss:0.0111 +2022-11-01 04:40:36,156-215-global_steps:155200,b-rec_loss:0.0221,b-velocity_loss:0.0032,b-e_q_loss:0.0119,h-rec_loss:0.0354,h-velocity_loss:0.0209,h-e_q_loss:0.0111 +2022-11-01 04:42:53,064-215-global_steps:155600,b-rec_loss:0.0220,b-velocity_loss:0.0032,b-e_q_loss:0.0119,h-rec_loss:0.0354,h-velocity_loss:0.0209,h-e_q_loss:0.0111 +2022-11-01 04:45:11,098-215-global_steps:156000,b-rec_loss:0.0221,b-velocity_loss:0.0032,b-e_q_loss:0.0119,h-rec_loss:0.0354,h-velocity_loss:0.0209,h-e_q_loss:0.0111 +2022-11-01 04:46:56,971-278-epoch:55 +2022-11-01 04:47:29,220-215-global_steps:156400,b-rec_loss:0.0220,b-velocity_loss:0.0032,b-e_q_loss:0.0117,h-rec_loss:0.0351,h-velocity_loss:0.0208,h-e_q_loss:0.0110 +2022-11-01 04:49:44,577-215-global_steps:156800,b-rec_loss:0.0220,b-velocity_loss:0.0032,b-e_q_loss:0.0118,h-rec_loss:0.0353,h-velocity_loss:0.0209,h-e_q_loss:0.0111 +2022-11-01 04:52:03,815-215-global_steps:157200,b-rec_loss:0.0219,b-velocity_loss:0.0032,b-e_q_loss:0.0118,h-rec_loss:0.0353,h-velocity_loss:0.0210,h-e_q_loss:0.0111 +2022-11-01 04:54:20,697-215-global_steps:157600,b-rec_loss:0.0220,b-velocity_loss:0.0032,b-e_q_loss:0.0118,h-rec_loss:0.0353,h-velocity_loss:0.0209,h-e_q_loss:0.0111 +2022-11-01 04:56:37,407-215-global_steps:158000,b-rec_loss:0.0220,b-velocity_loss:0.0032,b-e_q_loss:0.0118,h-rec_loss:0.0353,h-velocity_loss:0.0209,h-e_q_loss:0.0111 +2022-11-01 04:58:53,188-215-global_steps:158400,b-rec_loss:0.0220,b-velocity_loss:0.0032,b-e_q_loss:0.0118,h-rec_loss:0.0353,h-velocity_loss:0.0209,h-e_q_loss:0.0111 +2022-11-01 05:01:11,359-215-global_steps:158800,b-rec_loss:0.0220,b-velocity_loss:0.0032,b-e_q_loss:0.0118,h-rec_loss:0.0353,h-velocity_loss:0.0209,h-e_q_loss:0.0111 +2022-11-01 05:03:13,828-278-epoch:56 +2022-11-01 05:03:31,435-215-global_steps:159200,b-rec_loss:0.0218,b-velocity_loss:0.0032,b-e_q_loss:0.0117,h-rec_loss:0.0354,h-velocity_loss:0.0210,h-e_q_loss:0.0110 +2022-11-01 05:05:47,253-215-global_steps:159600,b-rec_loss:0.0221,b-velocity_loss:0.0032,b-e_q_loss:0.0118,h-rec_loss:0.0352,h-velocity_loss:0.0209,h-e_q_loss:0.0111 +2022-11-01 05:08:03,425-215-global_steps:160000,b-rec_loss:0.0220,b-velocity_loss:0.0032,b-e_q_loss:0.0118,h-rec_loss:0.0352,h-velocity_loss:0.0209,h-e_q_loss:0.0111 +2022-11-01 05:10:23,341-215-global_steps:160400,b-rec_loss:0.0219,b-velocity_loss:0.0032,b-e_q_loss:0.0117,h-rec_loss:0.0352,h-velocity_loss:0.0209,h-e_q_loss:0.0111 +2022-11-01 05:12:41,351-215-global_steps:160800,b-rec_loss:0.0219,b-velocity_loss:0.0032,b-e_q_loss:0.0117,h-rec_loss:0.0358,h-velocity_loss:0.0210,h-e_q_loss:0.0112 +2022-11-01 
05:14:57,155-215-global_steps:161200,b-rec_loss:0.0219,b-velocity_loss:0.0032,b-e_q_loss:0.0117,h-rec_loss:0.0361,h-velocity_loss:0.0210,h-e_q_loss:0.0113 +2022-11-01 05:17:13,973-215-global_steps:161600,b-rec_loss:0.0219,b-velocity_loss:0.0032,b-e_q_loss:0.0117,h-rec_loss:0.0361,h-velocity_loss:0.0210,h-e_q_loss:0.0113 +2022-11-01 05:19:30,378-278-epoch:57 +2022-11-01 05:19:33,909-215-global_steps:162000,b-rec_loss:0.0216,b-velocity_loss:0.0031,b-e_q_loss:0.0115,h-rec_loss:0.0353,h-velocity_loss:0.0208,h-e_q_loss:0.0111 +2022-11-01 05:21:53,465-215-global_steps:162400,b-rec_loss:0.0217,b-velocity_loss:0.0032,b-e_q_loss:0.0117,h-rec_loss:0.0355,h-velocity_loss:0.0209,h-e_q_loss:0.0112 +2022-11-01 05:24:09,992-215-global_steps:162800,b-rec_loss:0.0217,b-velocity_loss:0.0032,b-e_q_loss:0.0117,h-rec_loss:0.0354,h-velocity_loss:0.0209,h-e_q_loss:0.0111 +2022-11-01 05:26:28,138-215-global_steps:163200,b-rec_loss:0.0217,b-velocity_loss:0.0032,b-e_q_loss:0.0117,h-rec_loss:0.0353,h-velocity_loss:0.0209,h-e_q_loss:0.0111 +2022-11-01 05:28:46,072-215-global_steps:163600,b-rec_loss:0.0217,b-velocity_loss:0.0032,b-e_q_loss:0.0117,h-rec_loss:0.0353,h-velocity_loss:0.0209,h-e_q_loss:0.0111 +2022-11-01 05:31:05,519-215-global_steps:164000,b-rec_loss:0.0217,b-velocity_loss:0.0032,b-e_q_loss:0.0117,h-rec_loss:0.0352,h-velocity_loss:0.0209,h-e_q_loss:0.0111 +2022-11-01 05:33:21,265-215-global_steps:164400,b-rec_loss:0.0217,b-velocity_loss:0.0032,b-e_q_loss:0.0117,h-rec_loss:0.0352,h-velocity_loss:0.0209,h-e_q_loss:0.0111 +2022-11-01 05:35:39,592-215-global_steps:164800,b-rec_loss:0.0217,b-velocity_loss:0.0032,b-e_q_loss:0.0117,h-rec_loss:0.0352,h-velocity_loss:0.0209,h-e_q_loss:0.0111 +2022-11-01 05:35:52,314-278-epoch:58 +2022-11-01 05:37:59,708-215-global_steps:165200,b-rec_loss:0.0218,b-velocity_loss:0.0032,b-e_q_loss:0.0117,h-rec_loss:0.0350,h-velocity_loss:0.0209,h-e_q_loss:0.0110 +2022-11-01 05:40:19,446-215-global_steps:165600,b-rec_loss:0.0220,b-velocity_loss:0.0032,b-e_q_loss:0.0118,h-rec_loss:0.0350,h-velocity_loss:0.0209,h-e_q_loss:0.0110 +2022-11-01 05:42:37,056-215-global_steps:166000,b-rec_loss:0.0219,b-velocity_loss:0.0032,b-e_q_loss:0.0117,h-rec_loss:0.0350,h-velocity_loss:0.0209,h-e_q_loss:0.0110 +2022-11-01 05:44:54,330-215-global_steps:166400,b-rec_loss:0.0219,b-velocity_loss:0.0032,b-e_q_loss:0.0117,h-rec_loss:0.0350,h-velocity_loss:0.0209,h-e_q_loss:0.0110 +2022-11-01 05:47:12,927-215-global_steps:166800,b-rec_loss:0.0218,b-velocity_loss:0.0032,b-e_q_loss:0.0117,h-rec_loss:0.0350,h-velocity_loss:0.0209,h-e_q_loss:0.0110 +2022-11-01 05:49:31,125-215-global_steps:167200,b-rec_loss:0.0218,b-velocity_loss:0.0032,b-e_q_loss:0.0117,h-rec_loss:0.0350,h-velocity_loss:0.0209,h-e_q_loss:0.0110 +2022-11-01 05:51:47,201-215-global_steps:167600,b-rec_loss:0.0218,b-velocity_loss:0.0032,b-e_q_loss:0.0117,h-rec_loss:0.0350,h-velocity_loss:0.0209,h-e_q_loss:0.0110 +2022-11-01 05:52:14,026-278-epoch:59 +2022-11-01 05:54:06,746-215-global_steps:168000,b-rec_loss:0.0215,b-velocity_loss:0.0032,b-e_q_loss:0.0116,h-rec_loss:0.0350,h-velocity_loss:0.0209,h-e_q_loss:0.0110 +2022-11-01 05:56:25,511-215-global_steps:168400,b-rec_loss:0.0217,b-velocity_loss:0.0032,b-e_q_loss:0.0116,h-rec_loss:0.0350,h-velocity_loss:0.0209,h-e_q_loss:0.0110 +2022-11-01 05:58:41,720-215-global_steps:168800,b-rec_loss:0.0216,b-velocity_loss:0.0032,b-e_q_loss:0.0116,h-rec_loss:0.0350,h-velocity_loss:0.0209,h-e_q_loss:0.0110 +2022-11-01 
06:00:57,287-215-global_steps:169200,b-rec_loss:0.0216,b-velocity_loss:0.0032,b-e_q_loss:0.0116,h-rec_loss:0.0350,h-velocity_loss:0.0209,h-e_q_loss:0.0110 +2022-11-01 06:03:16,137-215-global_steps:169600,b-rec_loss:0.0216,b-velocity_loss:0.0032,b-e_q_loss:0.0116,h-rec_loss:0.0350,h-velocity_loss:0.0209,h-e_q_loss:0.0110 +2022-11-01 06:05:33,404-215-global_steps:170000,b-rec_loss:0.0216,b-velocity_loss:0.0032,b-e_q_loss:0.0116,h-rec_loss:0.0350,h-velocity_loss:0.0209,h-e_q_loss:0.0110 +2022-11-01 06:07:51,037-215-global_steps:170400,b-rec_loss:0.0215,b-velocity_loss:0.0032,b-e_q_loss:0.0116,h-rec_loss:0.0350,h-velocity_loss:0.0209,h-e_q_loss:0.0110 +2022-11-01 06:08:32,232-278-epoch:60 +2022-11-01 06:10:10,328-215-global_steps:170800,b-rec_loss:0.0216,b-velocity_loss:0.0032,b-e_q_loss:0.0116,h-rec_loss:0.0350,h-velocity_loss:0.0209,h-e_q_loss:0.0110 +2022-11-01 06:12:27,761-215-global_steps:171200,b-rec_loss:0.0216,b-velocity_loss:0.0032,b-e_q_loss:0.0116,h-rec_loss:0.0349,h-velocity_loss:0.0209,h-e_q_loss:0.0110 +2022-11-01 06:14:44,972-215-global_steps:171600,b-rec_loss:0.0215,b-velocity_loss:0.0032,b-e_q_loss:0.0116,h-rec_loss:0.0349,h-velocity_loss:0.0209,h-e_q_loss:0.0110 +2022-11-01 06:17:02,772-215-global_steps:172000,b-rec_loss:0.0215,b-velocity_loss:0.0032,b-e_q_loss:0.0116,h-rec_loss:0.0349,h-velocity_loss:0.0209,h-e_q_loss:0.0110 +2022-11-01 06:19:18,688-215-global_steps:172400,b-rec_loss:0.0215,b-velocity_loss:0.0032,b-e_q_loss:0.0116,h-rec_loss:0.0350,h-velocity_loss:0.0209,h-e_q_loss:0.0110 +2022-11-01 06:21:38,390-215-global_steps:172800,b-rec_loss:0.0215,b-velocity_loss:0.0032,b-e_q_loss:0.0116,h-rec_loss:0.0350,h-velocity_loss:0.0209,h-e_q_loss:0.0110 +2022-11-01 06:23:56,007-215-global_steps:173200,b-rec_loss:0.0214,b-velocity_loss:0.0032,b-e_q_loss:0.0115,h-rec_loss:0.0350,h-velocity_loss:0.0209,h-e_q_loss:0.0110 +2022-11-01 06:24:51,195-278-epoch:61 +2022-11-01 06:26:14,396-215-global_steps:173600,b-rec_loss:0.0214,b-velocity_loss:0.0032,b-e_q_loss:0.0115,h-rec_loss:0.0349,h-velocity_loss:0.0209,h-e_q_loss:0.0109 +2022-11-01 06:28:29,906-215-global_steps:174000,b-rec_loss:0.0214,b-velocity_loss:0.0032,b-e_q_loss:0.0115,h-rec_loss:0.0349,h-velocity_loss:0.0209,h-e_q_loss:0.0110 +2022-11-01 06:30:47,652-215-global_steps:174400,b-rec_loss:0.0214,b-velocity_loss:0.0032,b-e_q_loss:0.0115,h-rec_loss:0.0349,h-velocity_loss:0.0209,h-e_q_loss:0.0110 +2022-11-01 06:33:05,072-215-global_steps:174800,b-rec_loss:0.0214,b-velocity_loss:0.0032,b-e_q_loss:0.0115,h-rec_loss:0.0349,h-velocity_loss:0.0209,h-e_q_loss:0.0109 +2022-11-01 06:35:20,651-215-global_steps:175200,b-rec_loss:0.0214,b-velocity_loss:0.0032,b-e_q_loss:0.0115,h-rec_loss:0.0349,h-velocity_loss:0.0209,h-e_q_loss:0.0109 +2022-11-01 06:37:36,673-215-global_steps:175600,b-rec_loss:0.0214,b-velocity_loss:0.0032,b-e_q_loss:0.0115,h-rec_loss:0.0349,h-velocity_loss:0.0209,h-e_q_loss:0.0110 +2022-11-01 06:39:54,127-215-global_steps:176000,b-rec_loss:0.0214,b-velocity_loss:0.0032,b-e_q_loss:0.0115,h-rec_loss:0.0349,h-velocity_loss:0.0209,h-e_q_loss:0.0110 +2022-11-01 06:41:03,997-278-epoch:62 +2022-11-01 06:42:12,005-215-global_steps:176400,b-rec_loss:0.0212,b-velocity_loss:0.0032,b-e_q_loss:0.0115,h-rec_loss:0.0348,h-velocity_loss:0.0210,h-e_q_loss:0.0109 +2022-11-01 06:44:27,309-215-global_steps:176800,b-rec_loss:0.0213,b-velocity_loss:0.0032,b-e_q_loss:0.0115,h-rec_loss:0.0348,h-velocity_loss:0.0209,h-e_q_loss:0.0109 +2022-11-01 
06:46:43,006-215-global_steps:177200,b-rec_loss:0.0213,b-velocity_loss:0.0032,b-e_q_loss:0.0114,h-rec_loss:0.0348,h-velocity_loss:0.0209,h-e_q_loss:0.0110 +2022-11-01 06:49:01,602-215-global_steps:177600,b-rec_loss:0.0212,b-velocity_loss:0.0032,b-e_q_loss:0.0114,h-rec_loss:0.0349,h-velocity_loss:0.0209,h-e_q_loss:0.0110 +2022-11-01 06:51:18,385-215-global_steps:178000,b-rec_loss:0.0212,b-velocity_loss:0.0032,b-e_q_loss:0.0114,h-rec_loss:0.0349,h-velocity_loss:0.0209,h-e_q_loss:0.0110 +2022-11-01 06:53:33,699-215-global_steps:178400,b-rec_loss:0.0212,b-velocity_loss:0.0032,b-e_q_loss:0.0114,h-rec_loss:0.0349,h-velocity_loss:0.0209,h-e_q_loss:0.0110 +2022-11-01 06:55:50,230-215-global_steps:178800,b-rec_loss:0.0212,b-velocity_loss:0.0032,b-e_q_loss:0.0114,h-rec_loss:0.0349,h-velocity_loss:0.0209,h-e_q_loss:0.0110 +2022-11-01 06:57:14,457-278-epoch:63 +2022-11-01 06:58:08,610-215-global_steps:179200,b-rec_loss:0.0211,b-velocity_loss:0.0032,b-e_q_loss:0.0114,h-rec_loss:0.0348,h-velocity_loss:0.0209,h-e_q_loss:0.0109 +2022-11-01 07:00:25,981-215-global_steps:179600,b-rec_loss:0.0211,b-velocity_loss:0.0032,b-e_q_loss:0.0114,h-rec_loss:0.0348,h-velocity_loss:0.0209,h-e_q_loss:0.0110 +2022-11-01 07:02:40,928-215-global_steps:180000,b-rec_loss:0.0211,b-velocity_loss:0.0031,b-e_q_loss:0.0114,h-rec_loss:0.0348,h-velocity_loss:0.0209,h-e_q_loss:0.0109 +2022-11-01 07:04:57,614-215-global_steps:180400,b-rec_loss:0.0212,b-velocity_loss:0.0032,b-e_q_loss:0.0114,h-rec_loss:0.0348,h-velocity_loss:0.0209,h-e_q_loss:0.0109 +2022-11-01 07:07:15,871-215-global_steps:180800,b-rec_loss:0.0212,b-velocity_loss:0.0032,b-e_q_loss:0.0114,h-rec_loss:0.0348,h-velocity_loss:0.0209,h-e_q_loss:0.0109 +2022-11-01 07:09:32,032-215-global_steps:181200,b-rec_loss:0.0212,b-velocity_loss:0.0032,b-e_q_loss:0.0114,h-rec_loss:0.0348,h-velocity_loss:0.0209,h-e_q_loss:0.0109 +2022-11-01 07:11:46,745-215-global_steps:181600,b-rec_loss:0.0212,b-velocity_loss:0.0032,b-e_q_loss:0.0114,h-rec_loss:0.0348,h-velocity_loss:0.0209,h-e_q_loss:0.0109 +2022-11-01 07:13:26,159-278-epoch:64 +2022-11-01 07:14:05,541-215-global_steps:182000,b-rec_loss:0.0211,b-velocity_loss:0.0032,b-e_q_loss:0.0113,h-rec_loss:0.0346,h-velocity_loss:0.0208,h-e_q_loss:0.0109 +2022-11-01 07:16:22,161-215-global_steps:182400,b-rec_loss:0.0211,b-velocity_loss:0.0031,b-e_q_loss:0.0113,h-rec_loss:0.0347,h-velocity_loss:0.0209,h-e_q_loss:0.0109 +2022-11-01 07:18:38,609-215-global_steps:182800,b-rec_loss:0.0210,b-velocity_loss:0.0031,b-e_q_loss:0.0113,h-rec_loss:0.0347,h-velocity_loss:0.0209,h-e_q_loss:0.0109 +2022-11-01 07:20:53,699-215-global_steps:183200,b-rec_loss:0.0211,b-velocity_loss:0.0031,b-e_q_loss:0.0113,h-rec_loss:0.0347,h-velocity_loss:0.0209,h-e_q_loss:0.0109 +2022-11-01 07:23:11,844-215-global_steps:183600,b-rec_loss:0.0210,b-velocity_loss:0.0031,b-e_q_loss:0.0113,h-rec_loss:0.0347,h-velocity_loss:0.0209,h-e_q_loss:0.0109 +2022-11-01 07:25:29,555-215-global_steps:184000,b-rec_loss:0.0210,b-velocity_loss:0.0031,b-e_q_loss:0.0113,h-rec_loss:0.0347,h-velocity_loss:0.0209,h-e_q_loss:0.0109 +2022-11-01 07:27:46,327-215-global_steps:184400,b-rec_loss:0.0211,b-velocity_loss:0.0031,b-e_q_loss:0.0113,h-rec_loss:0.0347,h-velocity_loss:0.0209,h-e_q_loss:0.0109 +2022-11-01 07:29:38,800-278-epoch:65 +2022-11-01 07:30:03,815-215-global_steps:184800,b-rec_loss:0.0209,b-velocity_loss:0.0031,b-e_q_loss:0.0112,h-rec_loss:0.0346,h-velocity_loss:0.0209,h-e_q_loss:0.0110 +2022-11-01 
07:32:20,710-215-global_steps:185200,b-rec_loss:0.0211,b-velocity_loss:0.0032,b-e_q_loss:0.0113,h-rec_loss:0.0347,h-velocity_loss:0.0209,h-e_q_loss:0.0109 +2022-11-01 07:34:38,481-215-global_steps:185600,b-rec_loss:0.0211,b-velocity_loss:0.0032,b-e_q_loss:0.0113,h-rec_loss:0.0348,h-velocity_loss:0.0209,h-e_q_loss:0.0110 +2022-11-01 07:36:54,029-215-global_steps:186000,b-rec_loss:0.0210,b-velocity_loss:0.0032,b-e_q_loss:0.0113,h-rec_loss:0.0349,h-velocity_loss:0.0209,h-e_q_loss:0.0110 +2022-11-01 07:39:09,046-215-global_steps:186400,b-rec_loss:0.0210,b-velocity_loss:0.0031,b-e_q_loss:0.0113,h-rec_loss:0.0348,h-velocity_loss:0.0209,h-e_q_loss:0.0110 +2022-11-01 07:41:25,825-215-global_steps:186800,b-rec_loss:0.0210,b-velocity_loss:0.0031,b-e_q_loss:0.0113,h-rec_loss:0.0348,h-velocity_loss:0.0209,h-e_q_loss:0.0110 +2022-11-01 07:43:42,949-215-global_steps:187200,b-rec_loss:0.0210,b-velocity_loss:0.0031,b-e_q_loss:0.0113,h-rec_loss:0.0347,h-velocity_loss:0.0209,h-e_q_loss:0.0109 +2022-11-01 07:45:49,602-278-epoch:66 +2022-11-01 07:46:00,614-215-global_steps:187600,b-rec_loss:0.0210,b-velocity_loss:0.0031,b-e_q_loss:0.0112,h-rec_loss:0.0344,h-velocity_loss:0.0208,h-e_q_loss:0.0108 +2022-11-01 07:48:15,680-215-global_steps:188000,b-rec_loss:0.0209,b-velocity_loss:0.0031,b-e_q_loss:0.0112,h-rec_loss:0.0344,h-velocity_loss:0.0208,h-e_q_loss:0.0109 +2022-11-01 07:50:31,879-215-global_steps:188400,b-rec_loss:0.0209,b-velocity_loss:0.0031,b-e_q_loss:0.0112,h-rec_loss:0.0345,h-velocity_loss:0.0208,h-e_q_loss:0.0109 +2022-11-01 07:52:48,362-215-global_steps:188800,b-rec_loss:0.0211,b-velocity_loss:0.0032,b-e_q_loss:0.0113,h-rec_loss:0.0345,h-velocity_loss:0.0208,h-e_q_loss:0.0109 +2022-11-01 07:55:03,323-215-global_steps:189200,b-rec_loss:0.0210,b-velocity_loss:0.0032,b-e_q_loss:0.0113,h-rec_loss:0.0346,h-velocity_loss:0.0209,h-e_q_loss:0.0109 +2022-11-01 07:57:19,373-215-global_steps:189600,b-rec_loss:0.0210,b-velocity_loss:0.0032,b-e_q_loss:0.0113,h-rec_loss:0.0345,h-velocity_loss:0.0209,h-e_q_loss:0.0109 +2022-11-01 07:59:35,847-215-global_steps:190000,b-rec_loss:0.0210,b-velocity_loss:0.0031,b-e_q_loss:0.0113,h-rec_loss:0.0345,h-velocity_loss:0.0208,h-e_q_loss:0.0109 +2022-11-01 08:01:52,850-215-global_steps:190400,b-rec_loss:0.0210,b-velocity_loss:0.0031,b-e_q_loss:0.0113,h-rec_loss:0.0345,h-velocity_loss:0.0208,h-e_q_loss:0.0109 +2022-11-01 08:01:57,822-278-epoch:67 +2022-11-01 08:04:09,674-215-global_steps:190800,b-rec_loss:0.0208,b-velocity_loss:0.0031,b-e_q_loss:0.0112,h-rec_loss:0.0345,h-velocity_loss:0.0208,h-e_q_loss:0.0109 +2022-11-01 08:06:25,142-215-global_steps:191200,b-rec_loss:0.0208,b-velocity_loss:0.0031,b-e_q_loss:0.0112,h-rec_loss:0.0344,h-velocity_loss:0.0208,h-e_q_loss:0.0109 +2022-11-01 08:08:42,711-215-global_steps:191600,b-rec_loss:0.0208,b-velocity_loss:0.0031,b-e_q_loss:0.0112,h-rec_loss:0.0344,h-velocity_loss:0.0208,h-e_q_loss:0.0109 +2022-11-01 08:10:58,890-215-global_steps:192000,b-rec_loss:0.0208,b-velocity_loss:0.0031,b-e_q_loss:0.0112,h-rec_loss:0.0345,h-velocity_loss:0.0208,h-e_q_loss:0.0109 +2022-11-01 08:13:14,011-215-global_steps:192400,b-rec_loss:0.0208,b-velocity_loss:0.0031,b-e_q_loss:0.0112,h-rec_loss:0.0345,h-velocity_loss:0.0208,h-e_q_loss:0.0109 +2022-11-01 08:15:29,788-215-global_steps:192800,b-rec_loss:0.0208,b-velocity_loss:0.0031,b-e_q_loss:0.0112,h-rec_loss:0.0345,h-velocity_loss:0.0208,h-e_q_loss:0.0109 +2022-11-01 
08:17:48,266-215-global_steps:193200,b-rec_loss:0.0208,b-velocity_loss:0.0031,b-e_q_loss:0.0112,h-rec_loss:0.0345,h-velocity_loss:0.0208,h-e_q_loss:0.0109 +2022-11-01 08:18:07,608-278-epoch:68 +2022-11-01 08:20:05,475-215-global_steps:193600,b-rec_loss:0.0208,b-velocity_loss:0.0031,b-e_q_loss:0.0112,h-rec_loss:0.0344,h-velocity_loss:0.0208,h-e_q_loss:0.0109 +2022-11-01 08:22:20,817-215-global_steps:194000,b-rec_loss:0.0208,b-velocity_loss:0.0031,b-e_q_loss:0.0112,h-rec_loss:0.0344,h-velocity_loss:0.0208,h-e_q_loss:0.0109 +2022-11-01 08:24:36,426-215-global_steps:194400,b-rec_loss:0.0208,b-velocity_loss:0.0031,b-e_q_loss:0.0112,h-rec_loss:0.0344,h-velocity_loss:0.0208,h-e_q_loss:0.0109 +2022-11-01 08:26:54,084-215-global_steps:194800,b-rec_loss:0.0209,b-velocity_loss:0.0031,b-e_q_loss:0.0112,h-rec_loss:0.0344,h-velocity_loss:0.0208,h-e_q_loss:0.0109 +2022-11-01 08:29:10,537-215-global_steps:195200,b-rec_loss:0.0209,b-velocity_loss:0.0031,b-e_q_loss:0.0112,h-rec_loss:0.0344,h-velocity_loss:0.0208,h-e_q_loss:0.0109 +2022-11-01 08:31:25,649-215-global_steps:195600,b-rec_loss:0.0209,b-velocity_loss:0.0031,b-e_q_loss:0.0112,h-rec_loss:0.0344,h-velocity_loss:0.0208,h-e_q_loss:0.0109 +2022-11-01 08:33:41,123-215-global_steps:196000,b-rec_loss:0.0209,b-velocity_loss:0.0031,b-e_q_loss:0.0112,h-rec_loss:0.0344,h-velocity_loss:0.0208,h-e_q_loss:0.0109 +2022-11-01 08:34:15,404-278-epoch:69 +2022-11-01 08:36:00,067-215-global_steps:196400,b-rec_loss:0.0206,b-velocity_loss:0.0031,b-e_q_loss:0.0111,h-rec_loss:0.0343,h-velocity_loss:0.0208,h-e_q_loss:0.0109 +2022-11-01 08:38:16,378-215-global_steps:196800,b-rec_loss:0.0206,b-velocity_loss:0.0031,b-e_q_loss:0.0111,h-rec_loss:0.0343,h-velocity_loss:0.0208,h-e_q_loss:0.0109 +2022-11-01 08:40:32,613-215-global_steps:197200,b-rec_loss:0.0207,b-velocity_loss:0.0031,b-e_q_loss:0.0111,h-rec_loss:0.0343,h-velocity_loss:0.0208,h-e_q_loss:0.0109 +2022-11-01 08:42:48,089-215-global_steps:197600,b-rec_loss:0.0207,b-velocity_loss:0.0031,b-e_q_loss:0.0111,h-rec_loss:0.0343,h-velocity_loss:0.0208,h-e_q_loss:0.0109 +2022-11-01 08:45:04,645-215-global_steps:198000,b-rec_loss:0.0207,b-velocity_loss:0.0031,b-e_q_loss:0.0111,h-rec_loss:0.0343,h-velocity_loss:0.0208,h-e_q_loss:0.0109 +2022-11-01 08:47:21,034-215-global_steps:198400,b-rec_loss:0.0207,b-velocity_loss:0.0031,b-e_q_loss:0.0111,h-rec_loss:0.0344,h-velocity_loss:0.0208,h-e_q_loss:0.0109 +2022-11-01 08:49:35,862-215-global_steps:198800,b-rec_loss:0.0207,b-velocity_loss:0.0031,b-e_q_loss:0.0111,h-rec_loss:0.0346,h-velocity_loss:0.0208,h-e_q_loss:0.0109 +2022-11-01 08:50:24,101-278-epoch:70 +2022-11-01 08:51:53,426-215-global_steps:199200,b-rec_loss:0.0206,b-velocity_loss:0.0031,b-e_q_loss:0.0110,h-rec_loss:0.0343,h-velocity_loss:0.0208,h-e_q_loss:0.0108 +2022-11-01 08:54:10,375-215-global_steps:199600,b-rec_loss:0.0210,b-velocity_loss:0.0031,b-e_q_loss:0.0112,h-rec_loss:0.0343,h-velocity_loss:0.0208,h-e_q_loss:0.0108 +2022-11-01 08:56:26,326-215-global_steps:200000,b-rec_loss:0.0209,b-velocity_loss:0.0031,b-e_q_loss:0.0112,h-rec_loss:0.0343,h-velocity_loss:0.0208,h-e_q_loss:0.0108 +2022-11-01 08:58:40,754-215-global_steps:200400,b-rec_loss:0.0208,b-velocity_loss:0.0031,b-e_q_loss:0.0112,h-rec_loss:0.0343,h-velocity_loss:0.0208,h-e_q_loss:0.0108 +2022-11-01 09:00:56,913-215-global_steps:200800,b-rec_loss:0.0208,b-velocity_loss:0.0031,b-e_q_loss:0.0111,h-rec_loss:0.0343,h-velocity_loss:0.0208,h-e_q_loss:0.0108 +2022-11-01 
09:03:15,419-215-global_steps:201200,b-rec_loss:0.0207,b-velocity_loss:0.0031,b-e_q_loss:0.0111,h-rec_loss:0.0343,h-velocity_loss:0.0208,h-e_q_loss:0.0108 +2022-11-01 09:05:31,524-215-global_steps:201600,b-rec_loss:0.0207,b-velocity_loss:0.0031,b-e_q_loss:0.0111,h-rec_loss:0.0343,h-velocity_loss:0.0208,h-e_q_loss:0.0108 +2022-11-01 09:06:33,509-278-epoch:71 +2022-11-01 09:07:48,625-215-global_steps:202000,b-rec_loss:0.0205,b-velocity_loss:0.0031,b-e_q_loss:0.0111,h-rec_loss:0.0342,h-velocity_loss:0.0208,h-e_q_loss:0.0108 +2022-11-01 09:10:05,437-215-global_steps:202400,b-rec_loss:0.0205,b-velocity_loss:0.0031,b-e_q_loss:0.0111,h-rec_loss:0.0342,h-velocity_loss:0.0208,h-e_q_loss:0.0108 +2022-11-01 09:12:22,674-215-global_steps:202800,b-rec_loss:0.0206,b-velocity_loss:0.0031,b-e_q_loss:0.0111,h-rec_loss:0.0342,h-velocity_loss:0.0208,h-e_q_loss:0.0108 +2022-11-01 09:14:40,190-215-global_steps:203200,b-rec_loss:0.0206,b-velocity_loss:0.0031,b-e_q_loss:0.0111,h-rec_loss:0.0342,h-velocity_loss:0.0208,h-e_q_loss:0.0108 +2022-11-01 09:16:56,059-215-global_steps:203600,b-rec_loss:0.0205,b-velocity_loss:0.0031,b-e_q_loss:0.0111,h-rec_loss:0.0342,h-velocity_loss:0.0208,h-e_q_loss:0.0108 +2022-11-01 09:19:14,137-215-global_steps:204000,b-rec_loss:0.0205,b-velocity_loss:0.0031,b-e_q_loss:0.0111,h-rec_loss:0.0342,h-velocity_loss:0.0208,h-e_q_loss:0.0108 +2022-11-01 09:21:33,647-215-global_steps:204400,b-rec_loss:0.0205,b-velocity_loss:0.0031,b-e_q_loss:0.0111,h-rec_loss:0.0342,h-velocity_loss:0.0208,h-e_q_loss:0.0108 +2022-11-01 09:22:50,041-278-epoch:72 +2022-11-01 09:23:50,935-215-global_steps:204800,b-rec_loss:0.0219,b-velocity_loss:0.0033,b-e_q_loss:0.0113,h-rec_loss:0.0341,h-velocity_loss:0.0208,h-e_q_loss:0.0108 +2022-11-01 09:26:06,420-215-global_steps:205200,b-rec_loss:0.0214,b-velocity_loss:0.0032,b-e_q_loss:0.0112,h-rec_loss:0.0341,h-velocity_loss:0.0208,h-e_q_loss:0.0108 +2022-11-01 09:28:24,416-215-global_steps:205600,b-rec_loss:0.0211,b-velocity_loss:0.0032,b-e_q_loss:0.0112,h-rec_loss:0.0342,h-velocity_loss:0.0208,h-e_q_loss:0.0108 +2022-11-01 09:30:41,749-215-global_steps:206000,b-rec_loss:0.0209,b-velocity_loss:0.0032,b-e_q_loss:0.0111,h-rec_loss:0.0342,h-velocity_loss:0.0208,h-e_q_loss:0.0108 +2022-11-01 09:32:57,670-215-global_steps:206400,b-rec_loss:0.0208,b-velocity_loss:0.0031,b-e_q_loss:0.0111,h-rec_loss:0.0342,h-velocity_loss:0.0208,h-e_q_loss:0.0108 +2022-11-01 09:35:13,578-215-global_steps:206800,b-rec_loss:0.0208,b-velocity_loss:0.0031,b-e_q_loss:0.0111,h-rec_loss:0.0342,h-velocity_loss:0.0208,h-e_q_loss:0.0108 +2022-11-01 09:37:31,983-215-global_steps:207200,b-rec_loss:0.0207,b-velocity_loss:0.0031,b-e_q_loss:0.0111,h-rec_loss:0.0342,h-velocity_loss:0.0208,h-e_q_loss:0.0108 +2022-11-01 09:39:03,066-278-epoch:73 +2022-11-01 09:39:49,736-215-global_steps:207600,b-rec_loss:0.0207,b-velocity_loss:0.0031,b-e_q_loss:0.0111,h-rec_loss:0.0341,h-velocity_loss:0.0208,h-e_q_loss:0.0108 +2022-11-01 09:42:05,553-215-global_steps:208000,b-rec_loss:0.0205,b-velocity_loss:0.0031,b-e_q_loss:0.0110,h-rec_loss:0.0341,h-velocity_loss:0.0208,h-e_q_loss:0.0108 +2022-11-01 09:44:20,634-215-global_steps:208400,b-rec_loss:0.0204,b-velocity_loss:0.0031,b-e_q_loss:0.0110,h-rec_loss:0.0342,h-velocity_loss:0.0208,h-e_q_loss:0.0108 +2022-11-01 09:46:37,668-215-global_steps:208800,b-rec_loss:0.0204,b-velocity_loss:0.0031,b-e_q_loss:0.0110,h-rec_loss:0.0342,h-velocity_loss:0.0208,h-e_q_loss:0.0108 +2022-11-01 
09:48:55,414-215-global_steps:209200,b-rec_loss:0.0204,b-velocity_loss:0.0031,b-e_q_loss:0.0110,h-rec_loss:0.0342,h-velocity_loss:0.0208,h-e_q_loss:0.0108 +2022-11-01 09:51:10,730-215-global_steps:209600,b-rec_loss:0.0204,b-velocity_loss:0.0031,b-e_q_loss:0.0110,h-rec_loss:0.0342,h-velocity_loss:0.0208,h-e_q_loss:0.0108 +2022-11-01 09:53:25,248-215-global_steps:210000,b-rec_loss:0.0204,b-velocity_loss:0.0031,b-e_q_loss:0.0110,h-rec_loss:0.0342,h-velocity_loss:0.0208,h-e_q_loss:0.0108 +2022-11-01 09:55:10,191-278-epoch:74 +2022-11-01 09:55:42,616-215-global_steps:210400,b-rec_loss:0.0203,b-velocity_loss:0.0031,b-e_q_loss:0.0109,h-rec_loss:0.0339,h-velocity_loss:0.0207,h-e_q_loss:0.0108 +2022-11-01 09:57:58,649-215-global_steps:210800,b-rec_loss:0.0203,b-velocity_loss:0.0031,b-e_q_loss:0.0109,h-rec_loss:0.0340,h-velocity_loss:0.0208,h-e_q_loss:0.0108 +2022-11-01 10:00:12,868-215-global_steps:211200,b-rec_loss:0.0203,b-velocity_loss:0.0031,b-e_q_loss:0.0109,h-rec_loss:0.0341,h-velocity_loss:0.0208,h-e_q_loss:0.0108 +2022-11-01 10:02:27,556-215-global_steps:211600,b-rec_loss:0.0203,b-velocity_loss:0.0031,b-e_q_loss:0.0109,h-rec_loss:0.0341,h-velocity_loss:0.0208,h-e_q_loss:0.0108 +2022-11-01 10:04:42,987-215-global_steps:212000,b-rec_loss:0.0204,b-velocity_loss:0.0031,b-e_q_loss:0.0109,h-rec_loss:0.0341,h-velocity_loss:0.0208,h-e_q_loss:0.0108 +2022-11-01 10:06:58,759-215-global_steps:212400,b-rec_loss:0.0204,b-velocity_loss:0.0031,b-e_q_loss:0.0109,h-rec_loss:0.0341,h-velocity_loss:0.0208,h-e_q_loss:0.0108 +2022-11-01 10:09:15,700-215-global_steps:212800,b-rec_loss:0.0204,b-velocity_loss:0.0031,b-e_q_loss:0.0109,h-rec_loss:0.0341,h-velocity_loss:0.0208,h-e_q_loss:0.0108 +2022-11-01 10:11:14,414-278-epoch:75 +2022-11-01 10:11:32,701-215-global_steps:213200,b-rec_loss:0.0211,b-velocity_loss:0.0032,b-e_q_loss:0.0112,h-rec_loss:0.0340,h-velocity_loss:0.0207,h-e_q_loss:0.0108 +2022-11-01 10:13:49,120-215-global_steps:213600,b-rec_loss:0.0205,b-velocity_loss:0.0031,b-e_q_loss:0.0110,h-rec_loss:0.0340,h-velocity_loss:0.0208,h-e_q_loss:0.0108 +2022-11-01 10:16:10,224-215-global_steps:214000,b-rec_loss:0.0204,b-velocity_loss:0.0031,b-e_q_loss:0.0109,h-rec_loss:0.0341,h-velocity_loss:0.0208,h-e_q_loss:0.0108 +2022-11-01 10:18:39,673-215-global_steps:214400,b-rec_loss:0.0203,b-velocity_loss:0.0031,b-e_q_loss:0.0109,h-rec_loss:0.0340,h-velocity_loss:0.0208,h-e_q_loss:0.0108 +2022-11-01 10:20:54,803-215-global_steps:214800,b-rec_loss:0.0203,b-velocity_loss:0.0031,b-e_q_loss:0.0109,h-rec_loss:0.0340,h-velocity_loss:0.0208,h-e_q_loss:0.0108 +2022-11-01 10:23:11,043-215-global_steps:215200,b-rec_loss:0.0203,b-velocity_loss:0.0031,b-e_q_loss:0.0109,h-rec_loss:0.0340,h-velocity_loss:0.0208,h-e_q_loss:0.0108 +2022-11-01 10:25:25,707-215-global_steps:215600,b-rec_loss:0.0203,b-velocity_loss:0.0031,b-e_q_loss:0.0109,h-rec_loss:0.0340,h-velocity_loss:0.0208,h-e_q_loss:0.0108 +2022-11-01 10:27:38,367-278-epoch:76 +2022-11-01 10:27:42,602-215-global_steps:216000,b-rec_loss:0.0201,b-velocity_loss:0.0031,b-e_q_loss:0.0110,h-rec_loss:0.0339,h-velocity_loss:0.0208,h-e_q_loss:0.0109 +2022-11-01 10:29:57,925-215-global_steps:216400,b-rec_loss:0.0201,b-velocity_loss:0.0031,b-e_q_loss:0.0108,h-rec_loss:0.0339,h-velocity_loss:0.0208,h-e_q_loss:0.0108 +2022-11-01 10:32:13,536-215-global_steps:216800,b-rec_loss:0.0202,b-velocity_loss:0.0031,b-e_q_loss:0.0109,h-rec_loss:0.0340,h-velocity_loss:0.0208,h-e_q_loss:0.0108 +2022-11-01 
10:34:28,030-215-global_steps:217200,b-rec_loss:0.0202,b-velocity_loss:0.0031,b-e_q_loss:0.0108,h-rec_loss:0.0340,h-velocity_loss:0.0208,h-e_q_loss:0.0108 +2022-11-01 10:36:41,772-215-global_steps:217600,b-rec_loss:0.0202,b-velocity_loss:0.0031,b-e_q_loss:0.0108,h-rec_loss:0.0340,h-velocity_loss:0.0208,h-e_q_loss:0.0108 +2022-11-01 10:38:55,457-215-global_steps:218000,b-rec_loss:0.0202,b-velocity_loss:0.0031,b-e_q_loss:0.0108,h-rec_loss:0.0339,h-velocity_loss:0.0208,h-e_q_loss:0.0108 +2022-11-01 10:41:09,201-215-global_steps:218400,b-rec_loss:0.0202,b-velocity_loss:0.0031,b-e_q_loss:0.0108,h-rec_loss:0.0339,h-velocity_loss:0.0208,h-e_q_loss:0.0108 +2022-11-01 10:43:22,817-215-global_steps:218800,b-rec_loss:0.0202,b-velocity_loss:0.0031,b-e_q_loss:0.0108,h-rec_loss:0.0340,h-velocity_loss:0.0208,h-e_q_loss:0.0108 +2022-11-01 10:43:34,397-278-epoch:77 +2022-11-01 10:45:38,584-215-global_steps:219200,b-rec_loss:0.0203,b-velocity_loss:0.0031,b-e_q_loss:0.0109,h-rec_loss:0.0339,h-velocity_loss:0.0208,h-e_q_loss:0.0108 +2022-11-01 10:47:51,980-215-global_steps:219600,b-rec_loss:0.0202,b-velocity_loss:0.0031,b-e_q_loss:0.0108,h-rec_loss:0.0339,h-velocity_loss:0.0208,h-e_q_loss:0.0108 +2022-11-01 10:50:06,696-215-global_steps:220000,b-rec_loss:0.0202,b-velocity_loss:0.0031,b-e_q_loss:0.0108,h-rec_loss:0.0339,h-velocity_loss:0.0207,h-e_q_loss:0.0107 +2022-11-01 10:52:19,936-215-global_steps:220400,b-rec_loss:0.0202,b-velocity_loss:0.0031,b-e_q_loss:0.0108,h-rec_loss:0.0339,h-velocity_loss:0.0208,h-e_q_loss:0.0108 +2022-11-01 10:54:33,256-215-global_steps:220800,b-rec_loss:0.0201,b-velocity_loss:0.0031,b-e_q_loss:0.0108,h-rec_loss:0.0339,h-velocity_loss:0.0208,h-e_q_loss:0.0108 +2022-11-01 10:56:46,970-215-global_steps:221200,b-rec_loss:0.0202,b-velocity_loss:0.0031,b-e_q_loss:0.0108,h-rec_loss:0.0339,h-velocity_loss:0.0208,h-e_q_loss:0.0108 +2022-11-01 10:59:01,254-215-global_steps:221600,b-rec_loss:0.0201,b-velocity_loss:0.0031,b-e_q_loss:0.0108,h-rec_loss:0.0339,h-velocity_loss:0.0208,h-e_q_loss:0.0108 +2022-11-01 10:59:27,171-278-epoch:78 +2022-11-01 11:01:16,343-215-global_steps:222000,b-rec_loss:0.0200,b-velocity_loss:0.0031,b-e_q_loss:0.0108,h-rec_loss:0.0338,h-velocity_loss:0.0208,h-e_q_loss:0.0107 +2022-11-01 11:03:30,489-215-global_steps:222400,b-rec_loss:0.0200,b-velocity_loss:0.0031,b-e_q_loss:0.0108,h-rec_loss:0.0339,h-velocity_loss:0.0208,h-e_q_loss:0.0108 +2022-11-01 11:05:43,738-215-global_steps:222800,b-rec_loss:0.0200,b-velocity_loss:0.0031,b-e_q_loss:0.0108,h-rec_loss:0.0339,h-velocity_loss:0.0208,h-e_q_loss:0.0107 +2022-11-01 11:07:57,701-215-global_steps:223200,b-rec_loss:0.0200,b-velocity_loss:0.0031,b-e_q_loss:0.0108,h-rec_loss:0.0339,h-velocity_loss:0.0208,h-e_q_loss:0.0107 +2022-11-01 11:10:12,196-215-global_steps:223600,b-rec_loss:0.0200,b-velocity_loss:0.0031,b-e_q_loss:0.0108,h-rec_loss:0.0339,h-velocity_loss:0.0208,h-e_q_loss:0.0107 +2022-11-01 11:12:25,266-215-global_steps:224000,b-rec_loss:0.0203,b-velocity_loss:0.0031,b-e_q_loss:0.0108,h-rec_loss:0.0339,h-velocity_loss:0.0208,h-e_q_loss:0.0107 +2022-11-01 11:14:38,042-215-global_steps:224400,b-rec_loss:0.0203,b-velocity_loss:0.0031,b-e_q_loss:0.0109,h-rec_loss:0.0339,h-velocity_loss:0.0208,h-e_q_loss:0.0107 +2022-11-01 11:15:17,340-278-epoch:79 +2022-11-01 11:16:53,300-215-global_steps:224800,b-rec_loss:0.0200,b-velocity_loss:0.0031,b-e_q_loss:0.0107,h-rec_loss:0.0338,h-velocity_loss:0.0208,h-e_q_loss:0.0107 +2022-11-01 
11:19:05,987-215-global_steps:225200,b-rec_loss:0.0200,b-velocity_loss:0.0031,b-e_q_loss:0.0108,h-rec_loss:0.0337,h-velocity_loss:0.0207,h-e_q_loss:0.0107 +2022-11-01 11:21:19,428-215-global_steps:225600,b-rec_loss:0.0200,b-velocity_loss:0.0031,b-e_q_loss:0.0108,h-rec_loss:0.0338,h-velocity_loss:0.0207,h-e_q_loss:0.0107 +2022-11-01 11:23:33,409-215-global_steps:226000,b-rec_loss:0.0200,b-velocity_loss:0.0031,b-e_q_loss:0.0108,h-rec_loss:0.0338,h-velocity_loss:0.0207,h-e_q_loss:0.0107 +2022-11-01 11:25:47,325-215-global_steps:226400,b-rec_loss:0.0200,b-velocity_loss:0.0031,b-e_q_loss:0.0108,h-rec_loss:0.0338,h-velocity_loss:0.0207,h-e_q_loss:0.0107 +2022-11-01 11:28:00,922-215-global_steps:226800,b-rec_loss:0.0200,b-velocity_loss:0.0031,b-e_q_loss:0.0108,h-rec_loss:0.0338,h-velocity_loss:0.0207,h-e_q_loss:0.0107 +2022-11-01 11:30:14,232-215-global_steps:227200,b-rec_loss:0.0200,b-velocity_loss:0.0031,b-e_q_loss:0.0108,h-rec_loss:0.0338,h-velocity_loss:0.0207,h-e_q_loss:0.0107 +2022-11-01 11:31:08,163-278-epoch:80 +2022-11-01 11:32:29,800-215-global_steps:227600,b-rec_loss:0.0199,b-velocity_loss:0.0031,b-e_q_loss:0.0107,h-rec_loss:0.0337,h-velocity_loss:0.0206,h-e_q_loss:0.0107 +2022-11-01 11:34:43,758-215-global_steps:228000,b-rec_loss:0.0199,b-velocity_loss:0.0031,b-e_q_loss:0.0107,h-rec_loss:0.0339,h-velocity_loss:0.0208,h-e_q_loss:0.0108 +2022-11-01 11:36:57,848-215-global_steps:228400,b-rec_loss:0.0199,b-velocity_loss:0.0031,b-e_q_loss:0.0107,h-rec_loss:0.0342,h-velocity_loss:0.0208,h-e_q_loss:0.0109 +2022-11-01 11:39:10,478-215-global_steps:228800,b-rec_loss:0.0199,b-velocity_loss:0.0031,b-e_q_loss:0.0107,h-rec_loss:0.0341,h-velocity_loss:0.0208,h-e_q_loss:0.0109 +2022-11-01 11:41:23,275-215-global_steps:229200,b-rec_loss:0.0199,b-velocity_loss:0.0031,b-e_q_loss:0.0107,h-rec_loss:0.0340,h-velocity_loss:0.0208,h-e_q_loss:0.0108 +2022-11-01 11:43:37,633-215-global_steps:229600,b-rec_loss:0.0199,b-velocity_loss:0.0031,b-e_q_loss:0.0107,h-rec_loss:0.0340,h-velocity_loss:0.0208,h-e_q_loss:0.0108 +2022-11-01 11:45:52,022-215-global_steps:230000,b-rec_loss:0.0199,b-velocity_loss:0.0031,b-e_q_loss:0.0107,h-rec_loss:0.0339,h-velocity_loss:0.0208,h-e_q_loss:0.0108 +2022-11-01 11:47:00,002-278-epoch:81 +2022-11-01 11:48:06,910-215-global_steps:230400,b-rec_loss:0.0198,b-velocity_loss:0.0031,b-e_q_loss:0.0107,h-rec_loss:0.0336,h-velocity_loss:0.0207,h-e_q_loss:0.0107 +2022-11-01 11:50:20,553-215-global_steps:230800,b-rec_loss:0.0206,b-velocity_loss:0.0031,b-e_q_loss:0.0109,h-rec_loss:0.0336,h-velocity_loss:0.0207,h-e_q_loss:0.0107 +2022-11-01 11:52:35,548-215-global_steps:231200,b-rec_loss:0.0205,b-velocity_loss:0.0031,b-e_q_loss:0.0109,h-rec_loss:0.0337,h-velocity_loss:0.0207,h-e_q_loss:0.0107 +2022-11-01 11:54:49,831-215-global_steps:231600,b-rec_loss:0.0204,b-velocity_loss:0.0031,b-e_q_loss:0.0108,h-rec_loss:0.0337,h-velocity_loss:0.0207,h-e_q_loss:0.0107 +2022-11-01 11:57:02,997-215-global_steps:232000,b-rec_loss:0.0203,b-velocity_loss:0.0031,b-e_q_loss:0.0108,h-rec_loss:0.0337,h-velocity_loss:0.0207,h-e_q_loss:0.0107 +2022-11-01 11:59:17,832-215-global_steps:232400,b-rec_loss:0.0202,b-velocity_loss:0.0031,b-e_q_loss:0.0108,h-rec_loss:0.0337,h-velocity_loss:0.0207,h-e_q_loss:0.0107 +2022-11-01 12:01:31,609-215-global_steps:232800,b-rec_loss:0.0202,b-velocity_loss:0.0031,b-e_q_loss:0.0108,h-rec_loss:0.0337,h-velocity_loss:0.0207,h-e_q_loss:0.0107 +2022-11-01 12:02:53,311-278-epoch:82 +2022-11-01 
12:03:47,360-215-global_steps:233200,b-rec_loss:0.0198,b-velocity_loss:0.0031,b-e_q_loss:0.0106,h-rec_loss:0.0337,h-velocity_loss:0.0207,h-e_q_loss:0.0106 +2022-11-01 12:06:01,330-215-global_steps:233600,b-rec_loss:0.0198,b-velocity_loss:0.0031,b-e_q_loss:0.0106,h-rec_loss:0.0336,h-velocity_loss:0.0207,h-e_q_loss:0.0107 +2022-11-01 12:08:15,166-215-global_steps:234000,b-rec_loss:0.0198,b-velocity_loss:0.0031,b-e_q_loss:0.0106,h-rec_loss:0.0336,h-velocity_loss:0.0207,h-e_q_loss:0.0107 +2022-11-01 12:10:29,524-215-global_steps:234400,b-rec_loss:0.0198,b-velocity_loss:0.0031,b-e_q_loss:0.0106,h-rec_loss:0.0336,h-velocity_loss:0.0207,h-e_q_loss:0.0107 +2022-11-01 12:12:43,845-215-global_steps:234800,b-rec_loss:0.0198,b-velocity_loss:0.0031,b-e_q_loss:0.0107,h-rec_loss:0.0337,h-velocity_loss:0.0207,h-e_q_loss:0.0107 +2022-11-01 12:14:58,122-215-global_steps:235200,b-rec_loss:0.0198,b-velocity_loss:0.0031,b-e_q_loss:0.0106,h-rec_loss:0.0337,h-velocity_loss:0.0207,h-e_q_loss:0.0107 +2022-11-01 12:17:12,514-215-global_steps:235600,b-rec_loss:0.0198,b-velocity_loss:0.0031,b-e_q_loss:0.0106,h-rec_loss:0.0337,h-velocity_loss:0.0207,h-e_q_loss:0.0107 +2022-11-01 12:18:50,600-278-epoch:83 +2022-11-01 12:19:30,969-215-global_steps:236000,b-rec_loss:0.0201,b-velocity_loss:0.0031,b-e_q_loss:0.0107,h-rec_loss:0.0336,h-velocity_loss:0.0208,h-e_q_loss:0.0107 +2022-11-01 12:21:47,124-215-global_steps:236400,b-rec_loss:0.0201,b-velocity_loss:0.0031,b-e_q_loss:0.0107,h-rec_loss:0.0336,h-velocity_loss:0.0208,h-e_q_loss:0.0107 +2022-11-01 12:24:02,149-215-global_steps:236800,b-rec_loss:0.0200,b-velocity_loss:0.0031,b-e_q_loss:0.0107,h-rec_loss:0.0336,h-velocity_loss:0.0207,h-e_q_loss:0.0107 +2022-11-01 12:26:17,226-215-global_steps:237200,b-rec_loss:0.0199,b-velocity_loss:0.0031,b-e_q_loss:0.0107,h-rec_loss:0.0336,h-velocity_loss:0.0207,h-e_q_loss:0.0107 +2022-11-01 12:28:35,287-215-global_steps:237600,b-rec_loss:0.0199,b-velocity_loss:0.0031,b-e_q_loss:0.0106,h-rec_loss:0.0336,h-velocity_loss:0.0207,h-e_q_loss:0.0107 +2022-11-01 12:30:52,002-215-global_steps:238000,b-rec_loss:0.0199,b-velocity_loss:0.0031,b-e_q_loss:0.0106,h-rec_loss:0.0336,h-velocity_loss:0.0207,h-e_q_loss:0.0107 +2022-11-01 12:33:07,376-215-global_steps:238400,b-rec_loss:0.0198,b-velocity_loss:0.0031,b-e_q_loss:0.0106,h-rec_loss:0.0336,h-velocity_loss:0.0207,h-e_q_loss:0.0107 +2022-11-01 12:34:58,153-278-epoch:84 +2022-11-01 12:35:23,779-215-global_steps:238800,b-rec_loss:0.0197,b-velocity_loss:0.0031,b-e_q_loss:0.0106,h-rec_loss:0.0336,h-velocity_loss:0.0208,h-e_q_loss:0.0107 +2022-11-01 12:37:38,857-215-global_steps:239200,b-rec_loss:0.0196,b-velocity_loss:0.0031,b-e_q_loss:0.0106,h-rec_loss:0.0335,h-velocity_loss:0.0207,h-e_q_loss:0.0106 +2022-11-01 12:39:55,893-215-global_steps:239600,b-rec_loss:0.0197,b-velocity_loss:0.0031,b-e_q_loss:0.0106,h-rec_loss:0.0335,h-velocity_loss:0.0207,h-e_q_loss:0.0107 +2022-11-01 12:42:09,955-215-global_steps:240000,b-rec_loss:0.0197,b-velocity_loss:0.0031,b-e_q_loss:0.0106,h-rec_loss:0.0335,h-velocity_loss:0.0207,h-e_q_loss:0.0107 +2022-11-01 12:44:24,097-215-global_steps:240400,b-rec_loss:0.0197,b-velocity_loss:0.0031,b-e_q_loss:0.0106,h-rec_loss:0.0335,h-velocity_loss:0.0207,h-e_q_loss:0.0107 +2022-11-01 12:46:39,702-215-global_steps:240800,b-rec_loss:0.0197,b-velocity_loss:0.0031,b-e_q_loss:0.0106,h-rec_loss:0.0335,h-velocity_loss:0.0207,h-e_q_loss:0.0107 +2022-11-01 
12:48:55,748-215-global_steps:241200,b-rec_loss:0.0197,b-velocity_loss:0.0031,b-e_q_loss:0.0106,h-rec_loss:0.0336,h-velocity_loss:0.0207,h-e_q_loss:0.0107 +2022-11-01 12:51:01,660-278-epoch:85 +2022-11-01 12:51:12,950-215-global_steps:241600,b-rec_loss:0.0197,b-velocity_loss:0.0031,b-e_q_loss:0.0106,h-rec_loss:0.0335,h-velocity_loss:0.0208,h-e_q_loss:0.0107 +2022-11-01 12:53:27,669-215-global_steps:242000,b-rec_loss:0.0196,b-velocity_loss:0.0031,b-e_q_loss:0.0105,h-rec_loss:0.0335,h-velocity_loss:0.0207,h-e_q_loss:0.0107 +2022-11-01 12:55:44,906-215-global_steps:242400,b-rec_loss:0.0197,b-velocity_loss:0.0031,b-e_q_loss:0.0106,h-rec_loss:0.0335,h-velocity_loss:0.0207,h-e_q_loss:0.0107 +2022-11-01 12:58:02,234-215-global_steps:242800,b-rec_loss:0.0197,b-velocity_loss:0.0031,b-e_q_loss:0.0106,h-rec_loss:0.0335,h-velocity_loss:0.0207,h-e_q_loss:0.0107 +2022-11-01 13:00:18,445-215-global_steps:243200,b-rec_loss:0.0197,b-velocity_loss:0.0031,b-e_q_loss:0.0106,h-rec_loss:0.0335,h-velocity_loss:0.0207,h-e_q_loss:0.0107 +2022-11-01 13:02:33,227-215-global_steps:243600,b-rec_loss:0.0197,b-velocity_loss:0.0031,b-e_q_loss:0.0106,h-rec_loss:0.0335,h-velocity_loss:0.0207,h-e_q_loss:0.0107 +2022-11-01 13:04:46,714-215-global_steps:244000,b-rec_loss:0.0197,b-velocity_loss:0.0031,b-e_q_loss:0.0106,h-rec_loss:0.0335,h-velocity_loss:0.0207,h-e_q_loss:0.0107 +2022-11-01 13:07:03,176-215-global_steps:244400,b-rec_loss:0.0197,b-velocity_loss:0.0031,b-e_q_loss:0.0106,h-rec_loss:0.0335,h-velocity_loss:0.0207,h-e_q_loss:0.0107 +2022-11-01 13:07:07,504-278-epoch:86 +2022-11-01 13:09:22,492-215-global_steps:244800,b-rec_loss:0.0196,b-velocity_loss:0.0031,b-e_q_loss:0.0105,h-rec_loss:0.0335,h-velocity_loss:0.0207,h-e_q_loss:0.0107 +2022-11-01 13:11:40,088-215-global_steps:245200,b-rec_loss:0.0196,b-velocity_loss:0.0031,b-e_q_loss:0.0105,h-rec_loss:0.0335,h-velocity_loss:0.0207,h-e_q_loss:0.0107 +2022-11-01 13:13:55,806-215-global_steps:245600,b-rec_loss:0.0196,b-velocity_loss:0.0031,b-e_q_loss:0.0105,h-rec_loss:0.0336,h-velocity_loss:0.0207,h-e_q_loss:0.0107 +2022-11-01 13:16:12,278-215-global_steps:246000,b-rec_loss:0.0196,b-velocity_loss:0.0031,b-e_q_loss:0.0105,h-rec_loss:0.0336,h-velocity_loss:0.0207,h-e_q_loss:0.0107 +2022-11-01 13:18:30,561-215-global_steps:246400,b-rec_loss:0.0196,b-velocity_loss:0.0031,b-e_q_loss:0.0105,h-rec_loss:0.0336,h-velocity_loss:0.0207,h-e_q_loss:0.0107 +2022-11-01 13:20:47,730-215-global_steps:246800,b-rec_loss:0.0196,b-velocity_loss:0.0031,b-e_q_loss:0.0105,h-rec_loss:0.0336,h-velocity_loss:0.0207,h-e_q_loss:0.0107 +2022-11-01 13:23:04,683-215-global_steps:247200,b-rec_loss:0.0196,b-velocity_loss:0.0031,b-e_q_loss:0.0105,h-rec_loss:0.0335,h-velocity_loss:0.0207,h-e_q_loss:0.0107 +2022-11-01 13:23:22,950-278-epoch:87 +2022-11-01 13:25:22,193-215-global_steps:247600,b-rec_loss:0.0195,b-velocity_loss:0.0031,b-e_q_loss:0.0105,h-rec_loss:0.0334,h-velocity_loss:0.0207,h-e_q_loss:0.0106 +2022-11-01 13:27:39,476-215-global_steps:248000,b-rec_loss:0.0196,b-velocity_loss:0.0031,b-e_q_loss:0.0105,h-rec_loss:0.0334,h-velocity_loss:0.0207,h-e_q_loss:0.0106 +2022-11-01 13:29:58,050-215-global_steps:248400,b-rec_loss:0.0196,b-velocity_loss:0.0031,b-e_q_loss:0.0105,h-rec_loss:0.0334,h-velocity_loss:0.0207,h-e_q_loss:0.0106 +2022-11-01 13:32:14,036-215-global_steps:248800,b-rec_loss:0.0196,b-velocity_loss:0.0031,b-e_q_loss:0.0105,h-rec_loss:0.0334,h-velocity_loss:0.0207,h-e_q_loss:0.0106 +2022-11-01 
13:34:28,725-215-global_steps:249200,b-rec_loss:0.0196,b-velocity_loss:0.0031,b-e_q_loss:0.0105,h-rec_loss:0.0334,h-velocity_loss:0.0207,h-e_q_loss:0.0106 +2022-11-01 13:36:44,324-215-global_steps:249600,b-rec_loss:0.0196,b-velocity_loss:0.0031,b-e_q_loss:0.0105,h-rec_loss:0.0334,h-velocity_loss:0.0207,h-e_q_loss:0.0106 +2022-11-01 13:39:04,061-215-global_steps:250000,b-rec_loss:0.0196,b-velocity_loss:0.0031,b-e_q_loss:0.0105,h-rec_loss:0.0334,h-velocity_loss:0.0207,h-e_q_loss:0.0106 +2022-11-01 13:39:37,023-278-epoch:88 +2022-11-01 13:41:22,279-215-global_steps:250400,b-rec_loss:0.0195,b-velocity_loss:0.0031,b-e_q_loss:0.0105,h-rec_loss:0.0333,h-velocity_loss:0.0206,h-e_q_loss:0.0106 +2022-11-01 13:43:38,876-215-global_steps:250800,b-rec_loss:0.0195,b-velocity_loss:0.0031,b-e_q_loss:0.0105,h-rec_loss:0.0333,h-velocity_loss:0.0207,h-e_q_loss:0.0106 +2022-11-01 13:45:55,549-215-global_steps:251200,b-rec_loss:0.0195,b-velocity_loss:0.0031,b-e_q_loss:0.0105,h-rec_loss:0.0334,h-velocity_loss:0.0207,h-e_q_loss:0.0106 +2022-11-01 13:48:15,363-215-global_steps:251600,b-rec_loss:0.0195,b-velocity_loss:0.0031,b-e_q_loss:0.0105,h-rec_loss:0.0333,h-velocity_loss:0.0207,h-e_q_loss:0.0106 +2022-11-01 13:50:34,156-215-global_steps:252000,b-rec_loss:0.0195,b-velocity_loss:0.0031,b-e_q_loss:0.0105,h-rec_loss:0.0334,h-velocity_loss:0.0207,h-e_q_loss:0.0106 +2022-11-01 13:52:51,077-215-global_steps:252400,b-rec_loss:0.0195,b-velocity_loss:0.0031,b-e_q_loss:0.0105,h-rec_loss:0.0334,h-velocity_loss:0.0207,h-e_q_loss:0.0106 +2022-11-01 13:55:06,694-215-global_steps:252800,b-rec_loss:0.0195,b-velocity_loss:0.0031,b-e_q_loss:0.0105,h-rec_loss:0.0334,h-velocity_loss:0.0207,h-e_q_loss:0.0106 +2022-11-01 13:55:53,862-278-epoch:89 +2022-11-01 13:57:24,162-215-global_steps:253200,b-rec_loss:0.0194,b-velocity_loss:0.0031,b-e_q_loss:0.0104,h-rec_loss:0.0334,h-velocity_loss:0.0207,h-e_q_loss:0.0106 +2022-11-01 13:59:40,798-215-global_steps:253600,b-rec_loss:0.0194,b-velocity_loss:0.0031,b-e_q_loss:0.0104,h-rec_loss:0.0333,h-velocity_loss:0.0207,h-e_q_loss:0.0106 +2022-11-01 14:01:58,584-215-global_steps:254000,b-rec_loss:0.0194,b-velocity_loss:0.0031,b-e_q_loss:0.0104,h-rec_loss:0.0333,h-velocity_loss:0.0207,h-e_q_loss:0.0106 +2022-11-01 14:04:13,191-215-global_steps:254400,b-rec_loss:0.0194,b-velocity_loss:0.0031,b-e_q_loss:0.0104,h-rec_loss:0.0333,h-velocity_loss:0.0207,h-e_q_loss:0.0106 +2022-11-01 14:06:28,078-215-global_steps:254800,b-rec_loss:0.0194,b-velocity_loss:0.0031,b-e_q_loss:0.0104,h-rec_loss:0.0333,h-velocity_loss:0.0207,h-e_q_loss:0.0106 +2022-11-01 14:08:45,930-215-global_steps:255200,b-rec_loss:0.0194,b-velocity_loss:0.0031,b-e_q_loss:0.0104,h-rec_loss:0.0333,h-velocity_loss:0.0207,h-e_q_loss:0.0106 +2022-11-01 14:11:03,616-215-global_steps:255600,b-rec_loss:0.0194,b-velocity_loss:0.0031,b-e_q_loss:0.0104,h-rec_loss:0.0333,h-velocity_loss:0.0207,h-e_q_loss:0.0106 +2022-11-01 14:12:06,611-278-epoch:90 +2022-11-01 14:13:21,946-215-global_steps:256000,b-rec_loss:0.0194,b-velocity_loss:0.0031,b-e_q_loss:0.0104,h-rec_loss:0.0332,h-velocity_loss:0.0206,h-e_q_loss:0.0106 +2022-11-01 14:15:36,908-215-global_steps:256400,b-rec_loss:0.0194,b-velocity_loss:0.0031,b-e_q_loss:0.0104,h-rec_loss:0.0333,h-velocity_loss:0.0207,h-e_q_loss:0.0106 +2022-11-01 14:17:53,652-215-global_steps:256800,b-rec_loss:0.0194,b-velocity_loss:0.0031,b-e_q_loss:0.0104,h-rec_loss:0.0333,h-velocity_loss:0.0207,h-e_q_loss:0.0106 +2022-11-01 
14:20:11,693-215-global_steps:257200,b-rec_loss:0.0194,b-velocity_loss:0.0031,b-e_q_loss:0.0104,h-rec_loss:0.0333,h-velocity_loss:0.0207,h-e_q_loss:0.0106 +2022-11-01 14:22:28,027-215-global_steps:257600,b-rec_loss:0.0194,b-velocity_loss:0.0031,b-e_q_loss:0.0104,h-rec_loss:0.0333,h-velocity_loss:0.0207,h-e_q_loss:0.0106 +2022-11-01 14:24:43,158-215-global_steps:258000,b-rec_loss:0.0194,b-velocity_loss:0.0031,b-e_q_loss:0.0104,h-rec_loss:0.0333,h-velocity_loss:0.0207,h-e_q_loss:0.0106 +2022-11-01 14:26:59,695-215-global_steps:258400,b-rec_loss:0.0194,b-velocity_loss:0.0031,b-e_q_loss:0.0104,h-rec_loss:0.0333,h-velocity_loss:0.0207,h-e_q_loss:0.0106 +2022-11-01 14:28:17,845-278-epoch:91 +2022-11-01 14:29:19,942-215-global_steps:258800,b-rec_loss:0.0194,b-velocity_loss:0.0031,b-e_q_loss:0.0104,h-rec_loss:0.0331,h-velocity_loss:0.0206,h-e_q_loss:0.0105 +2022-11-01 14:31:39,172-215-global_steps:259200,b-rec_loss:0.0193,b-velocity_loss:0.0031,b-e_q_loss:0.0104,h-rec_loss:0.0332,h-velocity_loss:0.0206,h-e_q_loss:0.0106 +2022-11-01 14:33:54,479-215-global_steps:259600,b-rec_loss:0.0193,b-velocity_loss:0.0031,b-e_q_loss:0.0104,h-rec_loss:0.0332,h-velocity_loss:0.0206,h-e_q_loss:0.0106 +2022-11-01 14:36:08,416-215-global_steps:260000,b-rec_loss:0.0193,b-velocity_loss:0.0031,b-e_q_loss:0.0104,h-rec_loss:0.0332,h-velocity_loss:0.0206,h-e_q_loss:0.0106 +2022-11-01 14:38:23,564-215-global_steps:260400,b-rec_loss:0.0193,b-velocity_loss:0.0031,b-e_q_loss:0.0104,h-rec_loss:0.0333,h-velocity_loss:0.0207,h-e_q_loss:0.0106 +2022-11-01 14:40:40,449-215-global_steps:260800,b-rec_loss:0.0193,b-velocity_loss:0.0031,b-e_q_loss:0.0104,h-rec_loss:0.0333,h-velocity_loss:0.0207,h-e_q_loss:0.0106 +2022-11-01 14:42:56,660-215-global_steps:261200,b-rec_loss:0.0193,b-velocity_loss:0.0031,b-e_q_loss:0.0104,h-rec_loss:0.0333,h-velocity_loss:0.0207,h-e_q_loss:0.0106 +2022-11-01 14:44:25,435-278-epoch:92 +2022-11-01 14:45:12,036-215-global_steps:261600,b-rec_loss:0.0193,b-velocity_loss:0.0031,b-e_q_loss:0.0104,h-rec_loss:0.0331,h-velocity_loss:0.0206,h-e_q_loss:0.0106 +2022-11-01 14:47:26,310-215-global_steps:262000,b-rec_loss:0.0194,b-velocity_loss:0.0031,b-e_q_loss:0.0104,h-rec_loss:0.0331,h-velocity_loss:0.0206,h-e_q_loss:0.0106 +2022-11-01 14:49:42,184-215-global_steps:262400,b-rec_loss:0.0194,b-velocity_loss:0.0031,b-e_q_loss:0.0104,h-rec_loss:0.0331,h-velocity_loss:0.0206,h-e_q_loss:0.0106 +2022-11-01 14:51:58,578-215-global_steps:262800,b-rec_loss:0.0194,b-velocity_loss:0.0031,b-e_q_loss:0.0104,h-rec_loss:0.0332,h-velocity_loss:0.0206,h-e_q_loss:0.0106 +2022-11-01 14:54:13,072-215-global_steps:263200,b-rec_loss:0.0193,b-velocity_loss:0.0031,b-e_q_loss:0.0104,h-rec_loss:0.0332,h-velocity_loss:0.0206,h-e_q_loss:0.0106 +2022-11-01 14:56:27,214-215-global_steps:263600,b-rec_loss:0.0193,b-velocity_loss:0.0031,b-e_q_loss:0.0104,h-rec_loss:0.0332,h-velocity_loss:0.0206,h-e_q_loss:0.0106 +2022-11-01 14:58:41,457-215-global_steps:264000,b-rec_loss:0.0194,b-velocity_loss:0.0031,b-e_q_loss:0.0104,h-rec_loss:0.0332,h-velocity_loss:0.0207,h-e_q_loss:0.0106 +2022-11-01 15:00:26,940-278-epoch:93 +2022-11-01 15:01:00,861-215-global_steps:264400,b-rec_loss:0.0192,b-velocity_loss:0.0031,b-e_q_loss:0.0103,h-rec_loss:0.0330,h-velocity_loss:0.0206,h-e_q_loss:0.0105 +2022-11-01 15:03:18,021-215-global_steps:264800,b-rec_loss:0.0192,b-velocity_loss:0.0031,b-e_q_loss:0.0103,h-rec_loss:0.0331,h-velocity_loss:0.0206,h-e_q_loss:0.0105 +2022-11-01 
15:05:33,784-215-global_steps:265200,b-rec_loss:0.0192,b-velocity_loss:0.0031,b-e_q_loss:0.0103,h-rec_loss:0.0331,h-velocity_loss:0.0207,h-e_q_loss:0.0105 +2022-11-01 15:07:49,067-215-global_steps:265600,b-rec_loss:0.0192,b-velocity_loss:0.0031,b-e_q_loss:0.0103,h-rec_loss:0.0331,h-velocity_loss:0.0206,h-e_q_loss:0.0106 +2022-11-01 15:10:05,438-215-global_steps:266000,b-rec_loss:0.0192,b-velocity_loss:0.0031,b-e_q_loss:0.0103,h-rec_loss:0.0331,h-velocity_loss:0.0206,h-e_q_loss:0.0106 +2022-11-01 15:12:23,911-215-global_steps:266400,b-rec_loss:0.0192,b-velocity_loss:0.0031,b-e_q_loss:0.0103,h-rec_loss:0.0332,h-velocity_loss:0.0206,h-e_q_loss:0.0106 +2022-11-01 15:14:41,206-215-global_steps:266800,b-rec_loss:0.0192,b-velocity_loss:0.0031,b-e_q_loss:0.0103,h-rec_loss:0.0332,h-velocity_loss:0.0207,h-e_q_loss:0.0106 +2022-11-01 15:16:39,253-278-epoch:94 +2022-11-01 15:16:58,146-215-global_steps:267200,b-rec_loss:0.0191,b-velocity_loss:0.0031,b-e_q_loss:0.0103,h-rec_loss:0.0332,h-velocity_loss:0.0207,h-e_q_loss:0.0106 +2022-11-01 15:19:12,984-215-global_steps:267600,b-rec_loss:0.0191,b-velocity_loss:0.0031,b-e_q_loss:0.0103,h-rec_loss:0.0331,h-velocity_loss:0.0207,h-e_q_loss:0.0105 +2022-11-01 15:21:27,336-215-global_steps:268000,b-rec_loss:0.0191,b-velocity_loss:0.0031,b-e_q_loss:0.0103,h-rec_loss:0.0331,h-velocity_loss:0.0206,h-e_q_loss:0.0106 +2022-11-01 15:23:44,684-215-global_steps:268400,b-rec_loss:0.0192,b-velocity_loss:0.0031,b-e_q_loss:0.0103,h-rec_loss:0.0331,h-velocity_loss:0.0206,h-e_q_loss:0.0106 +2022-11-01 15:26:02,610-215-global_steps:268800,b-rec_loss:0.0192,b-velocity_loss:0.0031,b-e_q_loss:0.0103,h-rec_loss:0.0331,h-velocity_loss:0.0206,h-e_q_loss:0.0106 +2022-11-01 15:28:18,268-215-global_steps:269200,b-rec_loss:0.0192,b-velocity_loss:0.0031,b-e_q_loss:0.0103,h-rec_loss:0.0331,h-velocity_loss:0.0207,h-e_q_loss:0.0106 +2022-11-01 15:30:34,561-215-global_steps:269600,b-rec_loss:0.0192,b-velocity_loss:0.0031,b-e_q_loss:0.0103,h-rec_loss:0.0331,h-velocity_loss:0.0207,h-e_q_loss:0.0106 +2022-11-01 15:32:50,633-278-epoch:95 +2022-11-01 15:32:55,532-215-global_steps:270000,b-rec_loss:0.0190,b-velocity_loss:0.0031,b-e_q_loss:0.0103,h-rec_loss:0.0332,h-velocity_loss:0.0208,h-e_q_loss:0.0106 +2022-11-01 15:35:13,372-215-global_steps:270400,b-rec_loss:0.0191,b-velocity_loss:0.0031,b-e_q_loss:0.0103,h-rec_loss:0.0331,h-velocity_loss:0.0207,h-e_q_loss:0.0105 +2022-11-01 15:37:31,376-215-global_steps:270800,b-rec_loss:0.0191,b-velocity_loss:0.0031,b-e_q_loss:0.0103,h-rec_loss:0.0332,h-velocity_loss:0.0207,h-e_q_loss:0.0106 +2022-11-01 15:39:46,750-215-global_steps:271200,b-rec_loss:0.0191,b-velocity_loss:0.0031,b-e_q_loss:0.0103,h-rec_loss:0.0334,h-velocity_loss:0.0207,h-e_q_loss:0.0107 +2022-11-01 15:42:01,436-215-global_steps:271600,b-rec_loss:0.0191,b-velocity_loss:0.0031,b-e_q_loss:0.0103,h-rec_loss:0.0333,h-velocity_loss:0.0207,h-e_q_loss:0.0106 +2022-11-01 15:44:18,421-215-global_steps:272000,b-rec_loss:0.0191,b-velocity_loss:0.0031,b-e_q_loss:0.0103,h-rec_loss:0.0333,h-velocity_loss:0.0207,h-e_q_loss:0.0106 +2022-11-01 15:46:35,345-215-global_steps:272400,b-rec_loss:0.0193,b-velocity_loss:0.0031,b-e_q_loss:0.0103,h-rec_loss:0.0333,h-velocity_loss:0.0207,h-e_q_loss:0.0106 +2022-11-01 15:48:50,935-215-global_steps:272800,b-rec_loss:0.0193,b-velocity_loss:0.0031,b-e_q_loss:0.0103,h-rec_loss:0.0332,h-velocity_loss:0.0207,h-e_q_loss:0.0106 +2022-11-01 15:49:01,894-278-epoch:96 +2022-11-01 
15:51:07,868-215-global_steps:273200,b-rec_loss:0.0192,b-velocity_loss:0.0031,b-e_q_loss:0.0103,h-rec_loss:0.0330,h-velocity_loss:0.0206,h-e_q_loss:0.0105 +2022-11-01 15:53:25,681-215-global_steps:273600,b-rec_loss:0.0192,b-velocity_loss:0.0031,b-e_q_loss:0.0103,h-rec_loss:0.0330,h-velocity_loss:0.0206,h-e_q_loss:0.0105 +2022-11-01 15:55:40,563-215-global_steps:274000,b-rec_loss:0.0192,b-velocity_loss:0.0031,b-e_q_loss:0.0103,h-rec_loss:0.0330,h-velocity_loss:0.0206,h-e_q_loss:0.0105 +2022-11-01 15:57:54,787-215-global_steps:274400,b-rec_loss:0.0191,b-velocity_loss:0.0031,b-e_q_loss:0.0103,h-rec_loss:0.0330,h-velocity_loss:0.0206,h-e_q_loss:0.0105 +2022-11-01 16:00:08,553-215-global_steps:274800,b-rec_loss:0.0191,b-velocity_loss:0.0031,b-e_q_loss:0.0103,h-rec_loss:0.0330,h-velocity_loss:0.0206,h-e_q_loss:0.0105 +2022-11-01 16:02:22,212-215-global_steps:275200,b-rec_loss:0.0191,b-velocity_loss:0.0031,b-e_q_loss:0.0103,h-rec_loss:0.0330,h-velocity_loss:0.0206,h-e_q_loss:0.0105 +2022-11-01 16:04:36,871-215-global_steps:275600,b-rec_loss:0.0191,b-velocity_loss:0.0031,b-e_q_loss:0.0103,h-rec_loss:0.0330,h-velocity_loss:0.0206,h-e_q_loss:0.0105 +2022-11-01 16:05:02,051-278-epoch:97 +2022-11-01 16:06:53,333-215-global_steps:276000,b-rec_loss:0.0190,b-velocity_loss:0.0031,b-e_q_loss:0.0103,h-rec_loss:0.0329,h-velocity_loss:0.0206,h-e_q_loss:0.0105 +2022-11-01 16:09:07,068-215-global_steps:276400,b-rec_loss:0.0190,b-velocity_loss:0.0031,b-e_q_loss:0.0102,h-rec_loss:0.0330,h-velocity_loss:0.0206,h-e_q_loss:0.0105 +2022-11-01 16:11:20,430-215-global_steps:276800,b-rec_loss:0.0190,b-velocity_loss:0.0031,b-e_q_loss:0.0102,h-rec_loss:0.0330,h-velocity_loss:0.0206,h-e_q_loss:0.0105 +2022-11-01 16:13:35,003-215-global_steps:277200,b-rec_loss:0.0190,b-velocity_loss:0.0031,b-e_q_loss:0.0102,h-rec_loss:0.0330,h-velocity_loss:0.0206,h-e_q_loss:0.0105 +2022-11-01 16:15:49,860-215-global_steps:277600,b-rec_loss:0.0190,b-velocity_loss:0.0031,b-e_q_loss:0.0102,h-rec_loss:0.0330,h-velocity_loss:0.0206,h-e_q_loss:0.0105 +2022-11-01 16:18:06,129-215-global_steps:278000,b-rec_loss:0.0190,b-velocity_loss:0.0031,b-e_q_loss:0.0102,h-rec_loss:0.0330,h-velocity_loss:0.0206,h-e_q_loss:0.0105 +2022-11-01 16:20:22,141-215-global_steps:278400,b-rec_loss:0.0190,b-velocity_loss:0.0031,b-e_q_loss:0.0102,h-rec_loss:0.0330,h-velocity_loss:0.0206,h-e_q_loss:0.0105 +2022-11-01 16:21:00,868-278-epoch:98 +2022-11-01 16:22:37,015-215-global_steps:278800,b-rec_loss:0.0190,b-velocity_loss:0.0031,b-e_q_loss:0.0102,h-rec_loss:0.0329,h-velocity_loss:0.0207,h-e_q_loss:0.0105 +2022-11-01 16:24:50,852-215-global_steps:279200,b-rec_loss:0.0190,b-velocity_loss:0.0031,b-e_q_loss:0.0102,h-rec_loss:0.0329,h-velocity_loss:0.0206,h-e_q_loss:0.0105 +2022-11-01 16:27:07,120-215-global_steps:279600,b-rec_loss:0.0190,b-velocity_loss:0.0031,b-e_q_loss:0.0102,h-rec_loss:0.0330,h-velocity_loss:0.0206,h-e_q_loss:0.0105 +2022-11-01 16:29:21,413-215-global_steps:280000,b-rec_loss:0.0190,b-velocity_loss:0.0031,b-e_q_loss:0.0102,h-rec_loss:0.0330,h-velocity_loss:0.0206,h-e_q_loss:0.0105 +2022-11-01 16:31:36,100-215-global_steps:280400,b-rec_loss:0.0190,b-velocity_loss:0.0031,b-e_q_loss:0.0102,h-rec_loss:0.0330,h-velocity_loss:0.0206,h-e_q_loss:0.0105 +2022-11-01 16:33:50,353-215-global_steps:280800,b-rec_loss:0.0190,b-velocity_loss:0.0031,b-e_q_loss:0.0102,h-rec_loss:0.0330,h-velocity_loss:0.0206,h-e_q_loss:0.0105 +2022-11-01 
16:36:06,291-215-global_steps:281200,b-rec_loss:0.0190,b-velocity_loss:0.0031,b-e_q_loss:0.0102,h-rec_loss:0.0330,h-velocity_loss:0.0206,h-e_q_loss:0.0105 +2022-11-01 16:37:01,091-278-epoch:99 +2022-11-01 16:38:24,484-215-global_steps:281600,b-rec_loss:0.0189,b-velocity_loss:0.0030,b-e_q_loss:0.0102,h-rec_loss:0.0328,h-velocity_loss:0.0206,h-e_q_loss:0.0105 +2022-11-01 16:40:41,362-215-global_steps:282000,b-rec_loss:0.0190,b-velocity_loss:0.0031,b-e_q_loss:0.0102,h-rec_loss:0.0329,h-velocity_loss:0.0206,h-e_q_loss:0.0105 +2022-11-01 16:42:56,647-215-global_steps:282400,b-rec_loss:0.0190,b-velocity_loss:0.0031,b-e_q_loss:0.0102,h-rec_loss:0.0329,h-velocity_loss:0.0206,h-e_q_loss:0.0105 +2022-11-01 16:45:12,677-215-global_steps:282800,b-rec_loss:0.0190,b-velocity_loss:0.0031,b-e_q_loss:0.0102,h-rec_loss:0.0329,h-velocity_loss:0.0206,h-e_q_loss:0.0105 +2022-11-01 16:47:30,128-215-global_steps:283200,b-rec_loss:0.0190,b-velocity_loss:0.0031,b-e_q_loss:0.0102,h-rec_loss:0.0330,h-velocity_loss:0.0206,h-e_q_loss:0.0106 +2022-11-01 16:49:49,220-215-global_steps:283600,b-rec_loss:0.0190,b-velocity_loss:0.0031,b-e_q_loss:0.0102,h-rec_loss:0.0331,h-velocity_loss:0.0206,h-e_q_loss:0.0106 +2022-11-01 16:52:05,563-215-global_steps:284000,b-rec_loss:0.0190,b-velocity_loss:0.0031,b-e_q_loss:0.0102,h-rec_loss:0.0331,h-velocity_loss:0.0206,h-e_q_loss:0.0106 diff --git a/experiments/2022-11-02-smplx_S2G-body-pixel-3d/smplx_S2G.json b/experiments/2022-11-02-smplx_S2G-body-pixel-3d/smplx_S2G.json new file mode 100644 index 0000000000000000000000000000000000000000..9b898b1fbab5fa8d05e26fa02c6bd3cdfc1789e4 --- /dev/null +++ b/experiments/2022-11-02-smplx_S2G-body-pixel-3d/smplx_S2G.json @@ -0,0 +1,89 @@ +{ + "config_root_path": "/is/cluster/scratch/hyi/ExpressiveBody/SMPLifyX4/scripts", + "dataset_load_mode": "pickle", + "store_file_path": "store.pkl", + "smplx_npz_path": "visualise/smplx_model/SMPLX_NEUTRAL_2020.npz", + "extra_joint_path": "visualise/smplx_model/smplx_extra_joints.yaml", + "j14_regressor_path": "visualise/smplx_model/SMPLX_to_J14.pkl", + "param": { + "w_j": 1, + "w_b": 1, + "w_h": 1 + }, + "Data": { + "data_root": "../../expressive_body-V0.7/", + "pklname": "_3d_ta_body_mfcc_4p.pkl", + "pose": { + "normalization": false, + "convert_to_6d": false, + "norm_method": "all", + "augmentation": false, + "generate_length": 88, + "pre_pose_length": 0, + "pose_dim": 99, + "expression": true + }, + "aud": { + "feat_method": "mfcc", + "aud_feat_dim": 64, + "aud_feat_win_size": null, + "context_info": false + } + }, + "Model": { + "model_type": "body", + "model_name": "s2g_body_pixel", + "composition": true, + "code_num": 2048, + "bh_model": true, + "AudioOpt": "Adam", + "encoder_choice": "mfcc", + "operation_kernel": "rnn", + "interaction": "concat", + "rnn_cell": "gru", + "T_layer_norm": true, + "bidirectional": true, + "residual": true, + "use_template": true, + "template_length": 64, + "gan": false, + "separate": true, + "l1_joints": false, + "radianloss": true + }, + "DataLoader": { + "batch_size": 128, + "num_workers": 8 + }, + "Train": { + "epochs": 100, + "max_gradient_norm": 5, + "recon_input": true, + "learning_rate": { + "generator_learning_rate": 1e-4, + "discriminator_learning_rate": 1e-4 + }, + "weights": { + "kl_tolerance": 0.02, + "velocity_length": 10, + "keypoint_loss_weight": 1, + "recon_input_weight": 1, + "kl_loss_weight": 0.2, + "kl_start_weight": 1e-5, + "kl_decay_rate": 0.9995, + "vel_loss_weight": 1, + "vel_start_weight": 1e-5, + "vel_decay_rate": 0.99995, + "r_loss_weight": 1, + 
"zero_loss_weight": 0, + "gan_loss_weight": 0.1, + "k": 1.0 + } + }, + "Log": { + "save_every": 50, + "print_every": 400, + "name": "body-pixel-w-bhmodel" + } +} + \ No newline at end of file diff --git a/experiments/2022-11-02-smplx_S2G-body-pixel-3d/train.log b/experiments/2022-11-02-smplx_S2G-body-pixel-3d/train.log new file mode 100644 index 0000000000000000000000000000000000000000..d8b27d07785bfcaea2dcc99cd7c50b962d125c0e --- /dev/null +++ b/experiments/2022-11-02-smplx_S2G-body-pixel-3d/train.log @@ -0,0 +1,826 @@ +2022-11-02 14:36:01,954-275-start_training +2022-11-02 14:36:01,954-278-epoch:0 +2022-11-02 14:53:21,173-275-start_training +2022-11-02 14:53:21,173-278-epoch:0 +2022-11-02 14:54:48,199-275-start_training +2022-11-02 14:54:48,199-278-epoch:0 +2022-11-02 14:56:01,232-215-global_steps:400,grad:0.9394,ce_loss:7.1587 +2022-11-02 14:57:07,176-275-start_training +2022-11-02 14:57:07,176-278-epoch:0 +2022-11-02 14:58:11,062-215-global_steps:800,grad:1.0641,ce_loss:6.8644 +2022-11-02 14:59:04,360-215-global_steps:400,grad:0.9383,ce_loss:7.1594 +2022-11-02 15:00:20,898-215-global_steps:1200,grad:1.1320,ce_loss:6.6317 +2022-11-02 15:01:01,578-215-global_steps:800,grad:1.0630,ce_loss:6.8652 +2022-11-02 15:01:23,755-275-start_training +2022-11-02 15:01:23,755-278-epoch:0 +2022-11-02 15:01:58,757-275-start_training +2022-11-02 15:01:58,757-278-epoch:0 +2022-11-02 15:04:08,091-215-global_steps:400,grad:0.9439,ce_loss:7.1507 +2022-11-02 15:06:16,364-215-global_steps:800,grad:1.0695,ce_loss:6.8554 +2022-11-02 15:08:24,745-215-global_steps:1200,grad:1.1373,ce_loss:6.6219 +2022-11-02 15:10:32,710-215-global_steps:1600,grad:1.1785,ce_loss:6.4419 +2022-11-02 15:12:40,494-215-global_steps:2000,grad:1.2051,ce_loss:6.3021 +2022-11-02 15:14:48,244-215-global_steps:2400,grad:1.2247,ce_loss:6.1887 +2022-11-02 15:16:56,402-215-global_steps:2800,grad:1.2406,ce_loss:6.0942 +2022-11-02 15:17:10,286-278-epoch:1 +2022-11-02 15:19:06,358-215-global_steps:3200,grad:1.3481,ce_loss:5.4293 +2022-11-02 15:21:14,398-215-global_steps:3600,grad:1.3574,ce_loss:5.3896 +2022-11-02 15:23:22,000-215-global_steps:4000,grad:1.3619,ce_loss:5.3569 +2022-11-02 15:25:30,817-215-global_steps:4400,grad:1.3675,ce_loss:5.3268 +2022-11-02 15:27:39,796-215-global_steps:4800,grad:1.3714,ce_loss:5.2988 +2022-11-02 15:29:48,674-215-global_steps:5200,grad:1.3751,ce_loss:5.2728 +2022-11-02 15:31:56,425-215-global_steps:5600,grad:1.3791,ce_loss:5.2480 +2022-11-02 15:32:23,784-278-epoch:2 +2022-11-02 15:34:06,863-215-global_steps:6000,grad:1.4163,ce_loss:5.0201 +2022-11-02 15:36:15,793-215-global_steps:6400,grad:1.4234,ce_loss:5.0034 +2022-11-02 15:38:24,171-215-global_steps:6800,grad:1.4274,ce_loss:4.9881 +2022-11-02 15:40:32,457-215-global_steps:7200,grad:1.4310,ce_loss:4.9732 +2022-11-02 15:42:40,789-215-global_steps:7600,grad:1.4347,ce_loss:4.9574 +2022-11-02 15:44:49,372-215-global_steps:8000,grad:1.4382,ce_loss:4.9416 +2022-11-02 15:46:57,924-215-global_steps:8400,grad:1.4412,ce_loss:4.9260 +2022-11-02 15:47:38,266-278-epoch:3 +2022-11-02 15:49:06,831-215-global_steps:8800,grad:1.4719,ce_loss:4.7691 +2022-11-02 15:51:14,946-215-global_steps:9200,grad:1.4806,ce_loss:4.7599 +2022-11-02 15:53:23,744-215-global_steps:9600,grad:1.4864,ce_loss:4.7470 +2022-11-02 15:55:32,695-215-global_steps:10000,grad:1.4910,ce_loss:4.7357 +2022-11-02 15:57:41,142-215-global_steps:10400,grad:1.4943,ce_loss:4.7236 +2022-11-02 15:59:50,053-215-global_steps:10800,grad:1.4978,ce_loss:4.7126 +2022-11-02 
16:01:59,354-215-global_steps:11200,grad:1.5005,ce_loss:4.7009 +2022-11-02 16:02:53,876-278-epoch:4 +2022-11-02 16:04:10,028-215-global_steps:11600,grad:1.5331,ce_loss:4.5519 +2022-11-02 16:06:18,238-215-global_steps:12000,grad:1.5411,ce_loss:4.5582 +2022-11-02 16:08:26,354-215-global_steps:12400,grad:1.5474,ce_loss:4.5518 +2022-11-02 16:10:34,980-215-global_steps:12800,grad:1.5516,ce_loss:4.5469 +2022-11-02 16:12:43,382-215-global_steps:13200,grad:1.5555,ce_loss:4.5391 +2022-11-02 16:14:51,580-215-global_steps:13600,grad:1.5587,ce_loss:4.5313 +2022-11-02 16:16:59,500-215-global_steps:14000,grad:1.5618,ce_loss:4.5241 +2022-11-02 16:18:07,187-278-epoch:5 +2022-11-02 16:19:09,348-215-global_steps:14400,grad:1.5946,ce_loss:4.4117 +2022-11-02 16:21:18,205-215-global_steps:14800,grad:1.6040,ce_loss:4.4022 +2022-11-02 16:23:26,077-215-global_steps:15200,grad:1.6100,ce_loss:4.3985 +2022-11-02 16:25:34,073-215-global_steps:15600,grad:1.6143,ce_loss:4.3948 +2022-11-02 16:27:42,605-215-global_steps:16000,grad:1.6179,ce_loss:4.3875 +2022-11-02 16:29:51,310-215-global_steps:16400,grad:1.6213,ce_loss:4.3829 +2022-11-02 16:31:59,734-215-global_steps:16800,grad:1.6246,ce_loss:4.3772 +2022-11-02 16:33:20,538-278-epoch:6 +2022-11-02 16:34:09,065-215-global_steps:17200,grad:1.6565,ce_loss:4.2726 +2022-11-02 16:36:17,618-215-global_steps:17600,grad:1.6668,ce_loss:4.2699 +2022-11-02 16:38:27,820-215-global_steps:18000,grad:1.6729,ce_loss:4.2693 +2022-11-02 16:40:37,675-215-global_steps:18400,grad:1.6774,ce_loss:4.2657 +2022-11-02 16:42:47,299-215-global_steps:18800,grad:1.6821,ce_loss:4.2606 +2022-11-02 16:44:56,879-215-global_steps:19200,grad:1.6857,ce_loss:4.2554 +2022-11-02 16:47:06,381-215-global_steps:19600,grad:1.6888,ce_loss:4.2499 +2022-11-02 16:48:43,386-278-epoch:7 +2022-11-02 16:49:19,711-215-global_steps:20000,grad:1.7174,ce_loss:4.1692 +2022-11-02 16:51:31,453-215-global_steps:20400,grad:1.7316,ce_loss:4.1526 +2022-11-02 16:53:44,725-215-global_steps:20800,grad:1.7380,ce_loss:4.1496 +2022-11-02 16:55:58,809-215-global_steps:21200,grad:1.7427,ce_loss:4.1470 +2022-11-02 16:58:12,607-215-global_steps:21600,grad:1.7465,ce_loss:4.1450 +2022-11-02 17:00:25,534-215-global_steps:22000,grad:1.7501,ce_loss:4.1415 +2022-11-02 17:02:38,811-215-global_steps:22400,grad:1.7534,ce_loss:4.1360 +2022-11-02 17:04:31,621-278-epoch:8 +2022-11-02 17:04:54,805-215-global_steps:22800,grad:1.7857,ce_loss:4.0460 +2022-11-02 17:07:09,030-215-global_steps:23200,grad:1.7969,ce_loss:4.0404 +2022-11-02 17:09:22,056-215-global_steps:23600,grad:1.8022,ce_loss:4.0387 +2022-11-02 17:11:35,395-215-global_steps:24000,grad:1.8075,ce_loss:4.0389 +2022-11-02 17:13:48,699-215-global_steps:24400,grad:1.8111,ce_loss:4.0365 +2022-11-02 17:16:02,137-215-global_steps:24800,grad:1.8141,ce_loss:4.0335 +2022-11-02 17:18:15,891-215-global_steps:25200,grad:1.8171,ce_loss:4.0317 +2022-11-02 17:20:22,657-278-epoch:9 +2022-11-02 17:20:31,418-215-global_steps:25600,grad:1.8345,ce_loss:3.9509 +2022-11-02 17:22:45,832-215-global_steps:26000,grad:1.8584,ce_loss:3.9424 +2022-11-02 17:24:59,088-215-global_steps:26400,grad:1.8636,ce_loss:3.9441 +2022-11-02 17:27:12,321-215-global_steps:26800,grad:1.8682,ce_loss:3.9468 +2022-11-02 17:29:26,345-215-global_steps:27200,grad:1.8721,ce_loss:3.9458 +2022-11-02 17:31:40,818-215-global_steps:27600,grad:1.8751,ce_loss:3.9425 +2022-11-02 17:33:53,760-215-global_steps:28000,grad:1.8777,ce_loss:3.9403 +2022-11-02 17:36:06,559-215-global_steps:28400,grad:1.8802,ce_loss:3.9386 +2022-11-02 17:36:13,700-278-epoch:10 
+2022-11-02 17:38:22,009-215-global_steps:28800,grad:1.9118,ce_loss:3.8588 +2022-11-02 17:40:36,092-215-global_steps:29200,grad:1.9209,ce_loss:3.8625 +2022-11-02 17:42:49,955-215-global_steps:29600,grad:1.9246,ce_loss:3.8626 +2022-11-02 17:45:03,881-215-global_steps:30000,grad:1.9282,ce_loss:3.8613 +2022-11-02 17:47:17,246-215-global_steps:30400,grad:1.9316,ce_loss:3.8600 +2022-11-02 17:49:31,726-215-global_steps:30800,grad:1.9346,ce_loss:3.8583 +2022-11-02 17:51:45,797-215-global_steps:31200,grad:1.9374,ce_loss:3.8551 +2022-11-02 17:52:11,749-278-epoch:11 +2022-11-02 17:54:25,254-215-global_steps:31600,grad:1.9675,ce_loss:3.7734 +2022-11-02 17:56:40,390-215-global_steps:32000,grad:1.9767,ce_loss:3.7762 +2022-11-02 17:58:53,976-215-global_steps:32400,grad:1.9813,ce_loss:3.7835 +2022-11-02 18:01:07,165-215-global_steps:32800,grad:1.9844,ce_loss:3.7824 +2022-11-02 18:03:20,023-215-global_steps:33200,grad:1.9870,ce_loss:3.7811 +2022-11-02 18:05:32,960-215-global_steps:33600,grad:1.9894,ce_loss:3.7800 +2022-11-02 18:07:45,807-215-global_steps:34000,grad:1.9917,ce_loss:3.7791 +2022-11-02 18:08:21,462-278-epoch:12 +2022-11-02 18:10:02,139-215-global_steps:34400,grad:2.0203,ce_loss:3.7071 +2022-11-02 18:12:15,164-215-global_steps:34800,grad:2.0288,ce_loss:3.7061 +2022-11-02 18:14:43,363-215-global_steps:35200,grad:2.0329,ce_loss:3.7081 +2022-11-02 18:17:09,374-215-global_steps:35600,grad:2.0368,ce_loss:3.7070 +2022-11-02 18:19:22,498-215-global_steps:36000,grad:2.0398,ce_loss:3.7061 +2022-11-02 18:21:35,905-215-global_steps:36400,grad:2.0419,ce_loss:3.7064 +2022-11-02 18:23:50,657-215-global_steps:36800,grad:2.0439,ce_loss:3.7055 +2022-11-02 18:24:39,664-278-epoch:13 +2022-11-02 18:26:06,705-215-global_steps:37200,grad:2.0661,ce_loss:3.6380 +2022-11-02 18:28:20,249-215-global_steps:37600,grad:2.0770,ce_loss:3.6369 +2022-11-02 18:30:33,997-215-global_steps:38000,grad:2.0822,ce_loss:3.6386 +2022-11-02 18:32:48,240-215-global_steps:38400,grad:2.0848,ce_loss:3.6396 +2022-11-02 18:35:02,369-215-global_steps:38800,grad:2.0870,ce_loss:3.6401 +2022-11-02 18:37:16,158-215-global_steps:39200,grad:2.0900,ce_loss:3.6398 +2022-11-02 18:39:29,149-215-global_steps:39600,grad:2.0922,ce_loss:3.6402 +2022-11-02 18:40:32,102-278-epoch:14 +2022-11-02 18:41:44,387-215-global_steps:40000,grad:2.1099,ce_loss:3.5542 +2022-11-02 18:43:58,355-215-global_steps:40400,grad:2.1222,ce_loss:3.5604 +2022-11-02 18:46:12,639-215-global_steps:40800,grad:2.1281,ce_loss:3.5717 +2022-11-02 18:48:26,922-215-global_steps:41200,grad:2.1322,ce_loss:3.5737 +2022-11-02 18:50:40,626-215-global_steps:41600,grad:2.1348,ce_loss:3.5761 +2022-11-02 18:52:54,882-215-global_steps:42000,grad:2.1365,ce_loss:3.5771 +2022-11-02 18:55:08,431-215-global_steps:42400,grad:2.1379,ce_loss:3.5777 +2022-11-02 18:56:24,942-278-epoch:15 +2022-11-02 18:57:23,424-215-global_steps:42800,grad:2.1536,ce_loss:3.5038 +2022-11-02 18:59:38,542-215-global_steps:43200,grad:2.1645,ce_loss:3.5109 +2022-11-02 19:01:52,069-215-global_steps:43600,grad:2.1704,ce_loss:3.5154 +2022-11-02 19:04:05,518-215-global_steps:44000,grad:2.1749,ce_loss:3.5196 +2022-11-02 19:06:19,488-215-global_steps:44400,grad:2.1770,ce_loss:3.5200 +2022-11-02 19:08:33,941-215-global_steps:44800,grad:2.1789,ce_loss:3.5208 +2022-11-02 19:10:48,833-215-global_steps:45200,grad:2.1805,ce_loss:3.5208 +2022-11-02 19:12:21,250-278-epoch:16 +2022-11-02 19:13:06,178-215-global_steps:45600,grad:2.1855,ce_loss:3.4528 +2022-11-02 19:15:19,961-215-global_steps:46000,grad:2.2013,ce_loss:3.4555 +2022-11-02 
19:17:34,541-215-global_steps:46400,grad:2.2085,ce_loss:3.4599 +2022-11-02 19:19:48,648-215-global_steps:46800,grad:2.2130,ce_loss:3.4614 +2022-11-02 19:22:03,000-215-global_steps:47200,grad:2.2154,ce_loss:3.4635 +2022-11-02 19:24:16,659-215-global_steps:47600,grad:2.2176,ce_loss:3.4641 +2022-11-02 19:26:31,368-215-global_steps:48000,grad:2.2196,ce_loss:3.4656 +2022-11-02 19:28:16,368-278-epoch:17 +2022-11-02 19:28:46,179-215-global_steps:48400,grad:2.2299,ce_loss:3.4020 +2022-11-02 19:30:59,804-215-global_steps:48800,grad:2.2413,ce_loss:3.4025 +2022-11-02 19:33:13,260-215-global_steps:49200,grad:2.2493,ce_loss:3.4043 +2022-11-02 19:35:26,631-215-global_steps:49600,grad:2.2517,ce_loss:3.4071 +2022-11-02 19:37:41,412-215-global_steps:50000,grad:2.2540,ce_loss:3.4104 +2022-11-02 19:39:56,038-215-global_steps:50400,grad:2.2557,ce_loss:3.4124 +2022-11-02 19:42:09,705-215-global_steps:50800,grad:2.2574,ce_loss:3.4126 +2022-11-02 19:44:08,945-278-epoch:18 +2022-11-02 19:44:25,174-215-global_steps:51200,grad:2.2561,ce_loss:3.3134 +2022-11-02 19:46:39,592-215-global_steps:51600,grad:2.2774,ce_loss:3.3482 +2022-11-02 19:48:53,413-215-global_steps:52000,grad:2.2841,ce_loss:3.3542 +2022-11-02 19:51:07,529-215-global_steps:52400,grad:2.2867,ce_loss:3.3550 +2022-11-02 19:53:21,817-215-global_steps:52800,grad:2.2898,ce_loss:3.3581 +2022-11-02 19:55:35,915-215-global_steps:53200,grad:2.2920,ce_loss:3.3608 +2022-11-02 19:57:49,579-215-global_steps:53600,grad:2.2936,ce_loss:3.3634 +2022-11-02 20:00:02,989-278-epoch:19 +2022-11-02 20:00:05,187-215-global_steps:54000,grad:2.2641,ce_loss:3.2755 +2022-11-02 20:02:18,488-215-global_steps:54400,grad:2.3088,ce_loss:3.3005 +2022-11-02 20:04:32,377-215-global_steps:54800,grad:2.3156,ce_loss:3.3071 +2022-11-02 20:06:45,165-215-global_steps:55200,grad:2.3209,ce_loss:3.3114 +2022-11-02 20:08:58,169-215-global_steps:55600,grad:2.3232,ce_loss:3.3132 +2022-11-02 20:11:11,854-215-global_steps:56000,grad:2.3248,ce_loss:3.3161 +2022-11-02 20:13:25,275-215-global_steps:56400,grad:2.3256,ce_loss:3.3182 +2022-11-02 20:15:39,732-215-global_steps:56800,grad:2.3269,ce_loss:3.3194 +2022-11-02 20:15:53,467-278-epoch:20 +2022-11-02 20:17:55,986-215-global_steps:57200,grad:2.3424,ce_loss:3.2523 +2022-11-02 20:20:09,934-215-global_steps:57600,grad:2.3486,ce_loss:3.2599 +2022-11-02 20:22:23,774-215-global_steps:58000,grad:2.3533,ce_loss:3.2634 +2022-11-02 20:24:37,470-215-global_steps:58400,grad:2.3557,ce_loss:3.2659 +2022-11-02 20:26:51,181-215-global_steps:58800,grad:2.3578,ce_loss:3.2682 +2022-11-02 20:29:05,300-215-global_steps:59200,grad:2.3593,ce_loss:3.2708 +2022-11-02 20:31:19,647-215-global_steps:59600,grad:2.3605,ce_loss:3.2733 +2022-11-02 20:31:47,596-278-epoch:21 +2022-11-02 20:33:35,641-215-global_steps:60000,grad:2.3685,ce_loss:3.2066 +2022-11-02 20:35:49,887-215-global_steps:60400,grad:2.3791,ce_loss:3.2133 +2022-11-02 20:38:04,241-215-global_steps:60800,grad:2.3828,ce_loss:3.2235 +2022-11-02 20:40:18,513-215-global_steps:61200,grad:2.3854,ce_loss:3.2284 +2022-11-02 20:42:32,343-215-global_steps:61600,grad:2.3869,ce_loss:3.2295 +2022-11-02 20:44:45,528-215-global_steps:62000,grad:2.3882,ce_loss:3.2330 +2022-11-02 20:46:59,621-215-global_steps:62400,grad:2.3894,ce_loss:3.2336 +2022-11-02 20:47:41,353-278-epoch:22 +2022-11-02 20:49:15,735-215-global_steps:62800,grad:2.3959,ce_loss:3.1512 +2022-11-02 20:51:29,244-215-global_steps:63200,grad:2.4067,ce_loss:3.1731 +2022-11-02 20:53:43,551-215-global_steps:63600,grad:2.4105,ce_loss:3.1768 +2022-11-02 
20:55:58,173-215-global_steps:64000,grad:2.4123,ce_loss:3.1824 +2022-11-02 20:58:12,579-215-global_steps:64400,grad:2.4141,ce_loss:3.1871 +2022-11-02 21:00:26,062-215-global_steps:64800,grad:2.4158,ce_loss:3.1925 +2022-11-02 21:02:39,855-215-global_steps:65200,grad:2.4170,ce_loss:3.1956 +2022-11-02 21:03:35,681-278-epoch:23 +2022-11-02 21:04:55,328-215-global_steps:65600,grad:2.4217,ce_loss:3.1254 +2022-11-02 21:07:09,964-215-global_steps:66000,grad:2.4304,ce_loss:3.1390 +2022-11-02 21:09:23,407-215-global_steps:66400,grad:2.4363,ce_loss:3.1444 +2022-11-02 21:11:37,023-215-global_steps:66800,grad:2.4392,ce_loss:3.1482 +2022-11-02 21:13:51,445-215-global_steps:67200,grad:2.4412,ce_loss:3.1500 +2022-11-02 21:16:05,864-215-global_steps:67600,grad:2.4431,ce_loss:3.1526 +2022-11-02 21:18:19,434-215-global_steps:68000,grad:2.4443,ce_loss:3.1544 +2022-11-02 21:19:29,723-278-epoch:24 +2022-11-02 21:20:35,304-215-global_steps:68400,grad:2.4517,ce_loss:3.0797 +2022-11-02 21:22:48,462-215-global_steps:68800,grad:2.4555,ce_loss:3.0957 +2022-11-02 21:25:01,957-215-global_steps:69200,grad:2.4615,ce_loss:3.1069 +2022-11-02 21:27:15,507-215-global_steps:69600,grad:2.4642,ce_loss:3.1124 +2022-11-02 21:29:28,156-215-global_steps:70000,grad:2.4669,ce_loss:3.1149 +2022-11-02 21:31:43,179-215-global_steps:70400,grad:2.4681,ce_loss:3.1168 +2022-11-02 21:33:57,479-215-global_steps:70800,grad:2.4694,ce_loss:3.1192 +2022-11-02 21:35:21,141-278-epoch:25 +2022-11-02 21:36:12,680-215-global_steps:71200,grad:2.4646,ce_loss:3.0462 +2022-11-02 21:38:26,062-215-global_steps:71600,grad:2.4791,ce_loss:3.0621 +2022-11-02 21:40:39,658-215-global_steps:72000,grad:2.4845,ce_loss:3.0669 +2022-11-02 21:42:53,978-215-global_steps:72400,grad:2.4888,ce_loss:3.0740 +2022-11-02 21:45:08,225-215-global_steps:72800,grad:2.4910,ce_loss:3.0784 +2022-11-02 21:47:21,110-215-global_steps:73200,grad:2.4922,ce_loss:3.0814 +2022-11-02 21:49:34,901-215-global_steps:73600,grad:2.4932,ce_loss:3.0830 +2022-11-02 21:51:13,226-278-epoch:26 +2022-11-02 21:51:50,550-215-global_steps:74000,grad:2.4933,ce_loss:3.0053 +2022-11-02 21:54:03,820-215-global_steps:74400,grad:2.5060,ce_loss:3.0317 +2022-11-02 21:56:17,812-215-global_steps:74800,grad:2.5106,ce_loss:3.0401 +2022-11-02 21:58:31,339-215-global_steps:75200,grad:2.5114,ce_loss:3.0420 +2022-11-02 22:00:44,675-215-global_steps:75600,grad:2.5135,ce_loss:3.0447 +2022-11-02 22:02:57,802-215-global_steps:76000,grad:2.5149,ce_loss:3.0489 +2022-11-02 22:05:10,626-215-global_steps:76400,grad:2.5163,ce_loss:3.0519 +2022-11-02 22:07:02,983-278-epoch:27 +2022-11-02 22:07:26,495-215-global_steps:76800,grad:2.5062,ce_loss:2.9857 +2022-11-02 22:09:40,932-215-global_steps:77200,grad:2.5222,ce_loss:3.0012 +2022-11-02 22:11:54,431-215-global_steps:77600,grad:2.5285,ce_loss:3.0037 +2022-11-02 22:14:07,826-215-global_steps:78000,grad:2.5323,ce_loss:3.0076 +2022-11-02 22:16:21,128-215-global_steps:78400,grad:2.5350,ce_loss:3.0113 +2022-11-02 22:18:35,269-215-global_steps:78800,grad:2.5365,ce_loss:3.0142 +2022-11-02 22:20:48,923-215-global_steps:79200,grad:2.5377,ce_loss:3.0174 +2022-11-02 22:22:54,140-278-epoch:28 +2022-11-02 22:23:03,472-215-global_steps:79600,grad:2.5179,ce_loss:2.9536 +2022-11-02 22:25:16,932-215-global_steps:80000,grad:2.5405,ce_loss:2.9587 +2022-11-02 22:27:30,472-215-global_steps:80400,grad:2.5498,ce_loss:2.9703 +2022-11-02 22:29:43,397-215-global_steps:80800,grad:2.5544,ce_loss:2.9788 +2022-11-02 22:31:56,638-215-global_steps:81200,grad:2.5562,ce_loss:2.9799 +2022-11-02 
22:34:10,816-215-global_steps:81600,grad:2.5582,ce_loss:2.9846 +2022-11-02 22:36:24,877-215-global_steps:82000,grad:2.5597,ce_loss:2.9869 +2022-11-02 22:38:38,516-215-global_steps:82400,grad:2.5609,ce_loss:2.9889 +2022-11-02 22:38:44,676-278-epoch:29 +2022-11-02 22:40:54,230-215-global_steps:82800,grad:2.5631,ce_loss:2.9263 +2022-11-02 22:43:08,091-215-global_steps:83200,grad:2.5700,ce_loss:2.9379 +2022-11-02 22:45:22,225-215-global_steps:83600,grad:2.5739,ce_loss:2.9438 +2022-11-02 22:47:36,692-215-global_steps:84000,grad:2.5764,ce_loss:2.9496 +2022-11-02 22:49:50,350-215-global_steps:84400,grad:2.5788,ce_loss:2.9547 +2022-11-02 22:52:04,298-215-global_steps:84800,grad:2.5798,ce_loss:2.9578 +2022-11-02 22:54:18,175-215-global_steps:85200,grad:2.5802,ce_loss:2.9599 +2022-11-02 22:54:38,931-278-epoch:30 +2022-11-02 22:56:33,547-215-global_steps:85600,grad:2.5832,ce_loss:2.9047 +2022-11-02 22:58:47,211-215-global_steps:86000,grad:2.5879,ce_loss:2.9083 +2022-11-02 23:01:02,113-215-global_steps:86400,grad:2.5927,ce_loss:2.9164 +2022-11-02 23:03:16,120-215-global_steps:86800,grad:2.5960,ce_loss:2.9220 +2022-11-02 23:05:29,519-215-global_steps:87200,grad:2.5975,ce_loss:2.9268 +2022-11-02 23:07:42,904-215-global_steps:87600,grad:2.5984,ce_loss:2.9294 +2022-11-02 23:09:56,232-215-global_steps:88000,grad:2.5986,ce_loss:2.9311 +2022-11-02 23:10:31,097-278-epoch:31 +2022-11-02 23:12:12,772-215-global_steps:88400,grad:2.5980,ce_loss:2.8728 +2022-11-02 23:14:26,832-215-global_steps:88800,grad:2.6036,ce_loss:2.8799 +2022-11-02 23:16:40,090-215-global_steps:89200,grad:2.6092,ce_loss:2.8878 +2022-11-02 23:18:54,484-215-global_steps:89600,grad:2.6127,ce_loss:2.8895 +2022-11-02 23:21:08,128-215-global_steps:90000,grad:2.6149,ce_loss:2.8946 +2022-11-02 23:23:21,094-215-global_steps:90400,grad:2.6162,ce_loss:2.8982 +2022-11-02 23:25:35,710-215-global_steps:90800,grad:2.6172,ce_loss:2.9012 +2022-11-02 23:26:24,883-278-epoch:32 +2022-11-02 23:27:52,890-215-global_steps:91200,grad:2.6077,ce_loss:2.8423 +2022-11-02 23:30:07,069-215-global_steps:91600,grad:2.6218,ce_loss:2.8517 +2022-11-02 23:32:20,845-215-global_steps:92000,grad:2.6274,ce_loss:2.8578 +2022-11-02 23:34:34,902-215-global_steps:92400,grad:2.6311,ce_loss:2.8654 +2022-11-02 23:36:48,748-215-global_steps:92800,grad:2.6333,ce_loss:2.8692 +2022-11-02 23:39:03,081-215-global_steps:93200,grad:2.6344,ce_loss:2.8736 +2022-11-02 23:41:16,761-215-global_steps:93600,grad:2.6355,ce_loss:2.8772 +2022-11-02 23:42:19,190-278-epoch:33 +2022-11-02 23:43:31,626-215-global_steps:94000,grad:2.6203,ce_loss:2.8107 +2022-11-02 23:45:45,345-215-global_steps:94400,grad:2.6348,ce_loss:2.8205 +2022-11-02 23:47:59,387-215-global_steps:94800,grad:2.6428,ce_loss:2.8293 +2022-11-02 23:50:13,275-215-global_steps:95200,grad:2.6469,ce_loss:2.8351 +2022-11-02 23:52:27,026-215-global_steps:95600,grad:2.6494,ce_loss:2.8426 +2022-11-02 23:54:40,765-215-global_steps:96000,grad:2.6503,ce_loss:2.8469 +2022-11-02 23:56:53,847-215-global_steps:96400,grad:2.6509,ce_loss:2.8509 +2022-11-02 23:58:10,207-278-epoch:34 +2022-11-02 23:59:09,451-215-global_steps:96800,grad:2.6328,ce_loss:2.7830 +2022-11-03 00:01:22,758-215-global_steps:97200,grad:2.6506,ce_loss:2.7993 +2022-11-03 00:03:37,291-215-global_steps:97600,grad:2.6584,ce_loss:2.8050 +2022-11-03 00:05:51,807-215-global_steps:98000,grad:2.6620,ce_loss:2.8133 +2022-11-03 00:08:06,077-215-global_steps:98400,grad:2.6650,ce_loss:2.8203 +2022-11-03 00:10:20,431-215-global_steps:98800,grad:2.6660,ce_loss:2.8247 +2022-11-03 
00:12:34,502-215-global_steps:99200,grad:2.6659,ce_loss:2.8264 +2022-11-03 00:14:04,513-278-epoch:35 +2022-11-03 00:14:49,934-215-global_steps:99600,grad:2.6547,ce_loss:2.7588 +2022-11-03 00:17:03,923-215-global_steps:100000,grad:2.6642,ce_loss:2.7624 +2022-11-03 00:19:17,455-215-global_steps:100400,grad:2.6719,ce_loss:2.7786 +2022-11-03 00:21:31,078-215-global_steps:100800,grad:2.6766,ce_loss:2.7880 +2022-11-03 00:23:44,822-215-global_steps:101200,grad:2.6791,ce_loss:2.7928 +2022-11-03 00:25:58,828-215-global_steps:101600,grad:2.6808,ce_loss:2.7974 +2022-11-03 00:28:13,474-215-global_steps:102000,grad:2.6825,ce_loss:2.8012 +2022-11-03 00:29:58,694-278-epoch:36 +2022-11-03 00:30:29,521-215-global_steps:102400,grad:2.6720,ce_loss:2.7225 +2022-11-03 00:32:43,230-215-global_steps:102800,grad:2.6797,ce_loss:2.7411 +2022-11-03 00:34:56,236-215-global_steps:103200,grad:2.6871,ce_loss:2.7532 +2022-11-03 00:37:09,927-215-global_steps:103600,grad:2.6911,ce_loss:2.7615 +2022-11-03 00:39:24,001-215-global_steps:104000,grad:2.6938,ce_loss:2.7658 +2022-11-03 00:41:38,740-215-global_steps:104400,grad:2.6956,ce_loss:2.7706 +2022-11-03 00:43:53,313-215-global_steps:104800,grad:2.6974,ce_loss:2.7755 +2022-11-03 00:45:51,770-278-epoch:37 +2022-11-03 00:46:08,513-215-global_steps:105200,grad:2.6725,ce_loss:2.7019 +2022-11-03 00:48:22,877-215-global_steps:105600,grad:2.6920,ce_loss:2.7248 +2022-11-03 00:50:36,663-215-global_steps:106000,grad:2.7004,ce_loss:2.7309 +2022-11-03 00:52:51,200-215-global_steps:106400,grad:2.7032,ce_loss:2.7355 +2022-11-03 00:55:05,000-215-global_steps:106800,grad:2.7067,ce_loss:2.7411 +2022-11-03 00:57:19,146-215-global_steps:107200,grad:2.7081,ce_loss:2.7463 +2022-11-03 00:59:32,373-215-global_steps:107600,grad:2.7098,ce_loss:2.7509 +2022-11-03 01:01:44,564-278-epoch:38 +2022-11-03 01:01:47,557-215-global_steps:108000,grad:2.6482,ce_loss:2.6811 +2022-11-03 01:04:00,192-215-global_steps:108400,grad:2.7047,ce_loss:2.7062 +2022-11-03 01:06:14,204-215-global_steps:108800,grad:2.7112,ce_loss:2.7153 +2022-11-03 01:08:28,759-215-global_steps:109200,grad:2.7185,ce_loss:2.7225 +2022-11-03 01:10:42,569-215-global_steps:109600,grad:2.7211,ce_loss:2.7276 +2022-11-03 01:12:56,176-215-global_steps:110000,grad:2.7229,ce_loss:2.7315 +2022-11-03 01:15:10,745-215-global_steps:110400,grad:2.7235,ce_loss:2.7335 +2022-11-03 01:17:24,786-215-global_steps:110800,grad:2.7244,ce_loss:2.7368 +2022-11-03 01:17:38,065-278-epoch:39 +2022-11-03 01:19:40,629-215-global_steps:111200,grad:2.7157,ce_loss:2.6787 +2022-11-03 01:21:54,406-215-global_steps:111600,grad:2.7237,ce_loss:2.6845 +2022-11-03 01:24:07,878-215-global_steps:112000,grad:2.7293,ce_loss:2.6933 +2022-11-03 01:26:21,593-215-global_steps:112400,grad:2.7333,ce_loss:2.6990 +2022-11-03 01:28:35,216-215-global_steps:112800,grad:2.7354,ce_loss:2.7052 +2022-11-03 01:30:49,234-215-global_steps:113200,grad:2.7369,ce_loss:2.7103 +2022-11-03 01:33:04,275-215-global_steps:113600,grad:2.7374,ce_loss:2.7148 +2022-11-03 01:33:31,510-278-epoch:40 +2022-11-03 01:35:20,200-215-global_steps:114000,grad:2.7320,ce_loss:2.6609 +2022-11-03 01:37:33,636-215-global_steps:114400,grad:2.7402,ce_loss:2.6682 +2022-11-03 01:39:47,405-215-global_steps:114800,grad:2.7433,ce_loss:2.6734 +2022-11-03 01:42:01,265-215-global_steps:115200,grad:2.7468,ce_loss:2.6790 +2022-11-03 01:44:16,233-215-global_steps:115600,grad:2.7485,ce_loss:2.6849 +2022-11-03 01:46:29,722-215-global_steps:116000,grad:2.7488,ce_loss:2.6894 +2022-11-03 
01:48:42,698-215-global_steps:116400,grad:2.7495,ce_loss:2.6933 +2022-11-03 01:49:23,652-278-epoch:41 +2022-11-03 01:50:57,867-215-global_steps:116800,grad:2.7340,ce_loss:2.6237 +2022-11-03 01:53:10,819-215-global_steps:117200,grad:2.7461,ce_loss:2.6410 +2022-11-03 01:55:23,586-215-global_steps:117600,grad:2.7516,ce_loss:2.6512 +2022-11-03 01:57:37,606-215-global_steps:118000,grad:2.7551,ce_loss:2.6567 +2022-11-03 01:59:51,533-215-global_steps:118400,grad:2.7570,ce_loss:2.6626 +2022-11-03 02:02:05,884-215-global_steps:118800,grad:2.7590,ce_loss:2.6673 +2022-11-03 02:04:19,424-215-global_steps:119200,grad:2.7595,ce_loss:2.6720 +2022-11-03 02:05:14,446-278-epoch:42 +2022-11-03 02:06:34,592-215-global_steps:119600,grad:2.7443,ce_loss:2.6163 +2022-11-03 02:08:48,295-215-global_steps:120000,grad:2.7571,ce_loss:2.6233 +2022-11-03 02:11:02,409-215-global_steps:120400,grad:2.7627,ce_loss:2.6314 +2022-11-03 02:13:15,888-215-global_steps:120800,grad:2.7663,ce_loss:2.6383 +2022-11-03 02:15:29,046-215-global_steps:121200,grad:2.7673,ce_loss:2.6419 +2022-11-03 02:17:43,856-215-global_steps:121600,grad:2.7698,ce_loss:2.6465 +2022-11-03 02:19:58,470-215-global_steps:122000,grad:2.7714,ce_loss:2.6510 +2022-11-03 02:21:07,992-278-epoch:43 +2022-11-03 02:22:14,142-215-global_steps:122400,grad:2.7540,ce_loss:2.5963 +2022-11-03 02:24:27,384-215-global_steps:122800,grad:2.7687,ce_loss:2.6080 +2022-11-03 02:26:41,119-215-global_steps:123200,grad:2.7748,ce_loss:2.6177 +2022-11-03 02:28:54,671-215-global_steps:123600,grad:2.7797,ce_loss:2.6248 +2022-11-03 02:31:08,623-215-global_steps:124000,grad:2.7814,ce_loss:2.6289 +2022-11-03 02:33:22,612-215-global_steps:124400,grad:2.7823,ce_loss:2.6317 +2022-11-03 02:35:37,249-215-global_steps:124800,grad:2.7835,ce_loss:2.6342 +2022-11-03 02:37:01,120-278-epoch:44 +2022-11-03 02:37:53,576-215-global_steps:125200,grad:2.7622,ce_loss:2.5740 +2022-11-03 02:40:07,222-215-global_steps:125600,grad:2.7785,ce_loss:2.5893 +2022-11-03 02:42:20,862-215-global_steps:126000,grad:2.7836,ce_loss:2.5971 +2022-11-03 02:44:34,332-215-global_steps:126400,grad:2.7878,ce_loss:2.6058 +2022-11-03 02:46:48,140-215-global_steps:126800,grad:2.7899,ce_loss:2.6099 +2022-11-03 02:49:02,211-215-global_steps:127200,grad:2.7921,ce_loss:2.6135 +2022-11-03 02:51:15,616-215-global_steps:127600,grad:2.7933,ce_loss:2.6170 +2022-11-03 02:52:53,161-278-epoch:45 +2022-11-03 02:53:31,022-215-global_steps:128000,grad:2.7757,ce_loss:2.5657 +2022-11-03 02:55:45,516-215-global_steps:128400,grad:2.7863,ce_loss:2.5662 +2022-11-03 02:57:59,085-215-global_steps:128800,grad:2.7932,ce_loss:2.5718 +2022-11-03 03:00:12,129-215-global_steps:129200,grad:2.7972,ce_loss:2.5810 +2022-11-03 03:02:25,973-215-global_steps:129600,grad:2.8008,ce_loss:2.5891 +2022-11-03 03:04:39,701-215-global_steps:130000,grad:2.8023,ce_loss:2.5941 +2022-11-03 03:06:53,292-215-global_steps:130400,grad:2.8039,ce_loss:2.5985 +2022-11-03 03:08:44,521-278-epoch:46 +2022-11-03 03:09:08,534-215-global_steps:130800,grad:2.7627,ce_loss:2.5118 +2022-11-03 03:11:21,409-215-global_steps:131200,grad:2.7895,ce_loss:2.5388 +2022-11-03 03:13:35,324-215-global_steps:131600,grad:2.7973,ce_loss:2.5533 +2022-11-03 03:15:48,970-215-global_steps:132000,grad:2.8041,ce_loss:2.5633 +2022-11-03 03:18:02,522-215-global_steps:132400,grad:2.8079,ce_loss:2.5713 +2022-11-03 03:20:16,489-215-global_steps:132800,grad:2.8096,ce_loss:2.5756 +2022-11-03 03:22:30,314-215-global_steps:133200,grad:2.8108,ce_loss:2.5798 +2022-11-03 03:24:35,719-278-epoch:47 +2022-11-03 
03:24:46,156-215-global_steps:133600,grad:2.7828,ce_loss:2.5472 +2022-11-03 03:26:59,798-215-global_steps:134000,grad:2.8015,ce_loss:2.5335 +2022-11-03 03:29:12,673-215-global_steps:134400,grad:2.8098,ce_loss:2.5396 +2022-11-03 03:31:26,498-215-global_steps:134800,grad:2.8135,ce_loss:2.5448 +2022-11-03 03:33:40,066-215-global_steps:135200,grad:2.8168,ce_loss:2.5515 +2022-11-03 03:35:53,119-215-global_steps:135600,grad:2.8205,ce_loss:2.5576 +2022-11-03 03:38:07,667-215-global_steps:136000,grad:2.8211,ce_loss:2.5617 +2022-11-03 03:40:21,662-215-global_steps:136400,grad:2.8222,ce_loss:2.5649 +2022-11-03 03:40:27,483-278-epoch:48 +2022-11-03 03:42:37,014-215-global_steps:136800,grad:2.8013,ce_loss:2.5052 +2022-11-03 03:44:50,221-215-global_steps:137200,grad:2.8167,ce_loss:2.5212 +2022-11-03 03:47:03,094-215-global_steps:137600,grad:2.8254,ce_loss:2.5302 +2022-11-03 03:49:16,615-215-global_steps:138000,grad:2.8284,ce_loss:2.5368 +2022-11-03 03:51:31,111-215-global_steps:138400,grad:2.8299,ce_loss:2.5405 +2022-11-03 03:53:44,965-215-global_steps:138800,grad:2.8315,ce_loss:2.5436 +2022-11-03 03:55:58,698-215-global_steps:139200,grad:2.8324,ce_loss:2.5480 +2022-11-03 03:56:18,052-278-epoch:49 +2022-11-03 03:58:12,628-215-global_steps:139600,grad:2.8117,ce_loss:2.4906 +2022-11-03 04:00:24,780-215-global_steps:140000,grad:2.8261,ce_loss:2.5054 +2022-11-03 04:02:38,083-215-global_steps:140400,grad:2.8313,ce_loss:2.5148 +2022-11-03 04:04:50,609-215-global_steps:140800,grad:2.8347,ce_loss:2.5210 +2022-11-03 04:07:04,256-215-global_steps:141200,grad:2.8378,ce_loss:2.5275 +2022-11-03 04:09:16,812-215-global_steps:141600,grad:2.8397,ce_loss:2.5334 +2022-11-03 04:11:27,921-215-global_steps:142000,grad:2.8408,ce_loss:2.5380 +2022-11-03 04:12:01,443-278-epoch:50 +2022-11-03 04:13:41,655-215-global_steps:142400,grad:2.8120,ce_loss:2.4761 +2022-11-03 04:15:54,379-215-global_steps:142800,grad:2.8255,ce_loss:2.4811 +2022-11-03 04:18:06,637-215-global_steps:143200,grad:2.8343,ce_loss:2.4902 +2022-11-03 04:20:20,007-215-global_steps:143600,grad:2.8391,ce_loss:2.4987 +2022-11-03 04:22:32,383-215-global_steps:144000,grad:2.8418,ce_loss:2.5034 +2022-11-03 04:24:45,418-215-global_steps:144400,grad:2.8441,ce_loss:2.5090 +2022-11-03 04:26:57,491-215-global_steps:144800,grad:2.8457,ce_loss:2.5134 +2022-11-03 04:27:43,816-278-epoch:51 +2022-11-03 04:29:10,416-215-global_steps:145200,grad:2.8262,ce_loss:2.4565 +2022-11-03 04:31:22,052-215-global_steps:145600,grad:2.8421,ce_loss:2.4690 +2022-11-03 04:33:34,793-215-global_steps:146000,grad:2.8485,ce_loss:2.4788 +2022-11-03 04:35:46,239-215-global_steps:146400,grad:2.8521,ce_loss:2.4858 +2022-11-03 04:37:58,746-215-global_steps:146800,grad:2.8546,ce_loss:2.4916 +2022-11-03 04:40:11,576-215-global_steps:147200,grad:2.8564,ce_loss:2.4972 +2022-11-03 04:42:25,215-215-global_steps:147600,grad:2.8567,ce_loss:2.5004 +2022-11-03 04:43:27,177-278-epoch:52 +2022-11-03 04:44:41,130-215-global_steps:148000,grad:2.8384,ce_loss:2.4522 +2022-11-03 04:46:55,022-215-global_steps:148400,grad:2.8458,ce_loss:2.4542 +2022-11-03 04:49:08,403-215-global_steps:148800,grad:2.8536,ce_loss:2.4623 +2022-11-03 04:51:22,064-215-global_steps:149200,grad:2.8609,ce_loss:2.4736 +2022-11-03 04:53:33,691-215-global_steps:149600,grad:2.8623,ce_loss:2.4801 +2022-11-03 04:55:47,311-215-global_steps:150000,grad:2.8644,ce_loss:2.4855 +2022-11-03 04:58:00,835-215-global_steps:150400,grad:2.8656,ce_loss:2.4895 +2022-11-03 04:59:15,720-278-epoch:53 +2022-11-03 
05:00:14,437-215-global_steps:150800,grad:2.8326,ce_loss:2.4258 +2022-11-03 05:02:27,177-215-global_steps:151200,grad:2.8480,ce_loss:2.4404 +2022-11-03 05:04:39,632-215-global_steps:151600,grad:2.8568,ce_loss:2.4484 +2022-11-03 05:06:52,571-215-global_steps:152000,grad:2.8612,ce_loss:2.4564 +2022-11-03 05:09:05,656-215-global_steps:152400,grad:2.8660,ce_loss:2.4634 +2022-11-03 05:11:18,998-215-global_steps:152800,grad:2.8678,ce_loss:2.4679 +2022-11-03 05:13:31,408-215-global_steps:153200,grad:2.8696,ce_loss:2.4731 +2022-11-03 05:15:01,190-278-epoch:54 +2022-11-03 05:15:46,342-215-global_steps:153600,grad:2.8356,ce_loss:2.4230 +2022-11-03 05:17:59,191-215-global_steps:154000,grad:2.8562,ce_loss:2.4284 +2022-11-03 05:20:11,508-215-global_steps:154400,grad:2.8655,ce_loss:2.4408 +2022-11-03 05:22:24,754-215-global_steps:154800,grad:2.8711,ce_loss:2.4478 +2022-11-03 05:24:36,985-215-global_steps:155200,grad:2.8737,ce_loss:2.4530 +2022-11-03 05:26:48,740-215-global_steps:155600,grad:2.8760,ce_loss:2.4572 +2022-11-03 05:29:01,469-215-global_steps:156000,grad:2.8774,ce_loss:2.4601 +2022-11-03 05:30:44,696-278-epoch:55 +2022-11-03 05:31:16,026-215-global_steps:156400,grad:2.8404,ce_loss:2.4128 +2022-11-03 05:33:29,562-215-global_steps:156800,grad:2.8588,ce_loss:2.4155 +2022-11-03 05:35:42,232-215-global_steps:157200,grad:2.8710,ce_loss:2.4267 +2022-11-03 05:37:54,701-215-global_steps:157600,grad:2.8768,ce_loss:2.4338 +2022-11-03 05:40:05,953-215-global_steps:158000,grad:2.8802,ce_loss:2.4382 +2022-11-03 05:42:17,918-215-global_steps:158400,grad:2.8822,ce_loss:2.4437 +2022-11-03 05:44:30,422-215-global_steps:158800,grad:2.8840,ce_loss:2.4471 +2022-11-03 05:46:27,967-278-epoch:56 +2022-11-03 05:46:45,238-215-global_steps:159200,grad:2.8414,ce_loss:2.3777 +2022-11-03 05:48:58,096-215-global_steps:159600,grad:2.8625,ce_loss:2.3897 +2022-11-03 05:51:10,215-215-global_steps:160000,grad:2.8743,ce_loss:2.4047 +2022-11-03 05:53:21,816-215-global_steps:160400,grad:2.8814,ce_loss:2.4118 +2022-11-03 05:55:33,962-215-global_steps:160800,grad:2.8864,ce_loss:2.4203 +2022-11-03 05:57:46,694-215-global_steps:161200,grad:2.8889,ce_loss:2.4277 +2022-11-03 05:59:59,876-215-global_steps:161600,grad:2.8898,ce_loss:2.4310 +2022-11-03 06:02:10,968-278-epoch:57 +2022-11-03 06:02:14,441-215-global_steps:162000,grad:2.8132,ce_loss:2.3121 +2022-11-03 06:04:26,485-215-global_steps:162400,grad:2.8733,ce_loss:2.3908 +2022-11-03 06:06:38,825-215-global_steps:162800,grad:2.8819,ce_loss:2.3942 +2022-11-03 06:08:51,989-215-global_steps:163200,grad:2.8870,ce_loss:2.4030 +2022-11-03 06:11:06,462-215-global_steps:163600,grad:2.8912,ce_loss:2.4097 +2022-11-03 06:13:19,383-215-global_steps:164000,grad:2.8947,ce_loss:2.4149 +2022-11-03 06:15:32,488-215-global_steps:164400,grad:2.8962,ce_loss:2.4186 +2022-11-03 06:17:45,403-215-global_steps:164800,grad:2.8986,ce_loss:2.4227 +2022-11-03 06:17:57,939-278-epoch:58 +2022-11-03 06:19:59,920-215-global_steps:165200,grad:2.8701,ce_loss:2.3604 +2022-11-03 06:22:14,778-215-global_steps:165600,grad:2.8856,ce_loss:2.3766 +2022-11-03 06:24:27,766-215-global_steps:166000,grad:2.8920,ce_loss:2.3867 +2022-11-03 06:26:39,995-215-global_steps:166400,grad:2.8964,ce_loss:2.3942 +2022-11-03 06:28:50,802-215-global_steps:166800,grad:2.8986,ce_loss:2.3985 +2022-11-03 06:31:02,630-215-global_steps:167200,grad:2.8996,ce_loss:2.4032 +2022-11-03 06:33:14,655-215-global_steps:167600,grad:2.9021,ce_loss:2.4090 +2022-11-03 06:33:40,931-278-epoch:59 +2022-11-03 
06:35:28,902-215-global_steps:168000,grad:2.8817,ce_loss:2.3590 +2022-11-03 06:37:41,720-215-global_steps:168400,grad:2.8979,ce_loss:2.3772 +2022-11-03 06:39:53,257-215-global_steps:168800,grad:2.9025,ce_loss:2.3829 +2022-11-03 06:42:04,599-215-global_steps:169200,grad:2.9092,ce_loss:2.3895 +2022-11-03 06:44:16,530-215-global_steps:169600,grad:2.9101,ce_loss:2.3942 +2022-11-03 06:46:29,928-215-global_steps:170000,grad:2.9118,ce_loss:2.3987 +2022-11-03 06:48:43,136-215-global_steps:170400,grad:2.9114,ce_loss:2.4006 +2022-11-03 06:49:23,247-278-epoch:60 +2022-11-03 06:50:57,619-215-global_steps:170800,grad:2.8835,ce_loss:2.3445 +2022-11-03 06:53:11,291-215-global_steps:171200,grad:2.9012,ce_loss:2.3607 +2022-11-03 06:55:24,963-215-global_steps:171600,grad:2.9041,ce_loss:2.3650 +2022-11-03 06:57:37,628-215-global_steps:172000,grad:2.9063,ce_loss:2.3692 +2022-11-03 06:59:50,480-215-global_steps:172400,grad:2.9112,ce_loss:2.3757 +2022-11-03 07:02:02,774-215-global_steps:172800,grad:2.9138,ce_loss:2.3801 +2022-11-03 07:04:16,120-215-global_steps:173200,grad:2.9142,ce_loss:2.3841 +2022-11-03 07:05:10,375-278-epoch:61 +2022-11-03 07:06:30,520-215-global_steps:173600,grad:2.8925,ce_loss:2.3402 +2022-11-03 07:08:41,961-215-global_steps:174000,grad:2.8984,ce_loss:2.3416 +2022-11-03 07:10:54,184-215-global_steps:174400,grad:2.9076,ce_loss:2.3523 +2022-11-03 07:13:06,730-215-global_steps:174800,grad:2.9148,ce_loss:2.3616 +2022-11-03 07:15:19,840-215-global_steps:175200,grad:2.9169,ce_loss:2.3667 +2022-11-03 07:17:31,780-215-global_steps:175600,grad:2.9176,ce_loss:2.3696 +2022-11-03 07:19:43,968-215-global_steps:176000,grad:2.9196,ce_loss:2.3742 +2022-11-03 07:20:51,454-278-epoch:62 +2022-11-03 07:21:58,801-215-global_steps:176400,grad:2.8870,ce_loss:2.3211 +2022-11-03 07:24:12,153-215-global_steps:176800,grad:2.9085,ce_loss:2.3331 +2022-11-03 07:26:24,837-215-global_steps:177200,grad:2.9149,ce_loss:2.3422 +2022-11-03 07:28:38,175-215-global_steps:177600,grad:2.9210,ce_loss:2.3516 +2022-11-03 07:30:50,541-215-global_steps:178000,grad:2.9228,ce_loss:2.3565 +2022-11-03 07:33:03,452-215-global_steps:178400,grad:2.9242,ce_loss:2.3615 +2022-11-03 07:35:17,907-215-global_steps:178800,grad:2.9245,ce_loss:2.3638 +2022-11-03 07:36:39,622-278-epoch:63 +2022-11-03 07:37:31,407-215-global_steps:179200,grad:2.8883,ce_loss:2.2997 +2022-11-03 07:39:43,764-215-global_steps:179600,grad:2.9019,ce_loss:2.3135 +2022-11-03 07:41:55,902-215-global_steps:180000,grad:2.9128,ce_loss:2.3248 +2022-11-03 07:44:07,684-215-global_steps:180400,grad:2.9202,ce_loss:2.3344 +2022-11-03 07:46:18,050-215-global_steps:180800,grad:2.9231,ce_loss:2.3397 +2022-11-03 07:48:28,016-215-global_steps:181200,grad:2.9251,ce_loss:2.3439 +2022-11-03 07:50:38,454-215-global_steps:181600,grad:2.9262,ce_loss:2.3481 +2022-11-03 07:52:11,812-278-epoch:64 +2022-11-03 07:52:49,305-215-global_steps:182000,grad:2.8838,ce_loss:2.2808 +2022-11-03 07:54:58,302-215-global_steps:182400,grad:2.9138,ce_loss:2.3029 +2022-11-03 07:57:08,090-215-global_steps:182800,grad:2.9259,ce_loss:2.3173 +2022-11-03 07:59:18,091-215-global_steps:183200,grad:2.9320,ce_loss:2.3253 +2022-11-03 08:01:27,313-215-global_steps:183600,grad:2.9333,ce_loss:2.3295 +2022-11-03 08:03:36,612-215-global_steps:184000,grad:2.9348,ce_loss:2.3336 +2022-11-03 08:05:46,260-215-global_steps:184400,grad:2.9362,ce_loss:2.3381 +2022-11-03 08:07:34,140-278-epoch:65 +2022-11-03 08:07:57,954-215-global_steps:184800,grad:2.8752,ce_loss:2.2530 +2022-11-03 
08:10:07,930-215-global_steps:185200,grad:2.9177,ce_loss:2.2897 +2022-11-03 08:12:17,300-215-global_steps:185600,grad:2.9291,ce_loss:2.3023 +2022-11-03 08:14:26,980-215-global_steps:186000,grad:2.9347,ce_loss:2.3116 +2022-11-03 08:16:36,439-215-global_steps:186400,grad:2.9394,ce_loss:2.3205 +2022-11-03 08:18:46,109-215-global_steps:186800,grad:2.9416,ce_loss:2.3263 +2022-11-03 08:20:55,440-215-global_steps:187200,grad:2.9430,ce_loss:2.3302 +2022-11-03 08:22:56,502-278-epoch:66 +2022-11-03 08:23:06,891-215-global_steps:187600,grad:2.8834,ce_loss:2.2681 +2022-11-03 08:25:16,698-215-global_steps:188000,grad:2.9092,ce_loss:2.2779 +2022-11-03 08:27:26,676-215-global_steps:188400,grad:2.9277,ce_loss:2.2914 +2022-11-03 08:29:36,285-215-global_steps:188800,grad:2.9346,ce_loss:2.2990 +2022-11-03 08:31:46,145-215-global_steps:189200,grad:2.9386,ce_loss:2.3047 +2022-11-03 08:33:56,738-215-global_steps:189600,grad:2.9416,ce_loss:2.3111 +2022-11-03 08:36:06,813-215-global_steps:190000,grad:2.9433,ce_loss:2.3151 +2022-11-03 08:38:16,189-215-global_steps:190400,grad:2.9441,ce_loss:2.3189 +2022-11-03 08:38:20,882-278-epoch:67 +2022-11-03 08:40:26,583-215-global_steps:190800,grad:2.9246,ce_loss:2.2733 +2022-11-03 08:42:36,295-215-global_steps:191200,grad:2.9352,ce_loss:2.2837 +2022-11-03 08:44:45,739-215-global_steps:191600,grad:2.9390,ce_loss:2.2898 +2022-11-03 08:46:54,369-215-global_steps:192000,grad:2.9433,ce_loss:2.2961 +2022-11-03 08:49:03,313-215-global_steps:192400,grad:2.9455,ce_loss:2.3006 +2022-11-03 08:51:13,190-215-global_steps:192800,grad:2.9484,ce_loss:2.3049 +2022-11-03 08:53:22,796-215-global_steps:193200,grad:2.9498,ce_loss:2.3086 +2022-11-03 08:53:41,143-278-epoch:68 +2022-11-03 08:55:33,199-215-global_steps:193600,grad:2.9324,ce_loss:2.2657 +2022-11-03 08:57:42,557-215-global_steps:194000,grad:2.9420,ce_loss:2.2735 +2022-11-03 08:59:53,626-215-global_steps:194400,grad:2.9470,ce_loss:2.2803 +2022-11-03 09:02:04,603-215-global_steps:194800,grad:2.9507,ce_loss:2.2876 +2022-11-03 09:04:15,001-215-global_steps:195200,grad:2.9531,ce_loss:2.2928 +2022-11-03 09:06:24,509-215-global_steps:195600,grad:2.9553,ce_loss:2.2976 +2022-11-03 09:08:34,298-215-global_steps:196000,grad:2.9552,ce_loss:2.2996 +2022-11-03 09:09:06,622-278-epoch:69 +2022-11-03 09:10:45,656-215-global_steps:196400,grad:2.9237,ce_loss:2.2415 +2022-11-03 09:12:55,437-215-global_steps:196800,grad:2.9388,ce_loss:2.2586 +2022-11-03 09:15:04,935-215-global_steps:197200,grad:2.9447,ce_loss:2.2654 +2022-11-03 09:17:14,807-215-global_steps:197600,grad:2.9504,ce_loss:2.2730 +2022-11-03 09:19:25,658-215-global_steps:198000,grad:2.9541,ce_loss:2.2793 +2022-11-03 09:21:35,282-215-global_steps:198400,grad:2.9568,ce_loss:2.2851 +2022-11-03 09:23:44,495-215-global_steps:198800,grad:2.9584,ce_loss:2.2893 +2022-11-03 09:24:30,271-278-epoch:70 +2022-11-03 09:25:56,108-215-global_steps:199200,grad:2.9220,ce_loss:2.2288 +2022-11-03 09:28:07,011-215-global_steps:199600,grad:2.9440,ce_loss:2.2495 +2022-11-03 09:30:18,044-215-global_steps:200000,grad:2.9497,ce_loss:2.2572 +2022-11-03 09:32:28,131-215-global_steps:200400,grad:2.9567,ce_loss:2.2669 +2022-11-03 09:34:38,086-215-global_steps:200800,grad:2.9597,ce_loss:2.2723 +2022-11-03 09:36:48,901-215-global_steps:201200,grad:2.9614,ce_loss:2.2768 +2022-11-03 09:38:59,065-215-global_steps:201600,grad:2.9621,ce_loss:2.2805 +2022-11-03 09:39:58,351-278-epoch:71 +2022-11-03 09:41:10,045-215-global_steps:202000,grad:2.9314,ce_loss:2.2219 +2022-11-03 
09:43:20,144-215-global_steps:202400,grad:2.9441,ce_loss:2.2360 +2022-11-03 09:45:30,653-215-global_steps:202800,grad:2.9552,ce_loss:2.2485 +2022-11-03 09:47:41,657-215-global_steps:203200,grad:2.9608,ce_loss:2.2561 +2022-11-03 09:49:51,591-215-global_steps:203600,grad:2.9646,ce_loss:2.2629 +2022-11-03 09:52:00,869-215-global_steps:204000,grad:2.9670,ce_loss:2.2689 +2022-11-03 09:54:12,109-215-global_steps:204400,grad:2.9682,ce_loss:2.2726 +2022-11-03 09:55:25,946-278-epoch:72 +2022-11-03 09:56:24,475-215-global_steps:204800,grad:2.9293,ce_loss:2.2144 +2022-11-03 09:58:34,716-215-global_steps:205200,grad:2.9485,ce_loss:2.2306 +2022-11-03 10:00:45,160-215-global_steps:205600,grad:2.9556,ce_loss:2.2383 +2022-11-03 10:02:55,531-215-global_steps:206000,grad:2.9618,ce_loss:2.2460 +2022-11-03 10:05:06,637-215-global_steps:206400,grad:2.9670,ce_loss:2.2535 +2022-11-03 10:07:16,854-215-global_steps:206800,grad:2.9686,ce_loss:2.2590 +2022-11-03 10:09:26,633-215-global_steps:207200,grad:2.9693,ce_loss:2.2631 +2022-11-03 10:10:52,954-278-epoch:73 +2022-11-03 10:11:37,382-215-global_steps:207600,grad:2.9242,ce_loss:2.1992 +2022-11-03 10:13:48,172-215-global_steps:208000,grad:2.9476,ce_loss:2.2171 +2022-11-03 10:15:58,512-215-global_steps:208400,grad:2.9567,ce_loss:2.2245 +2022-11-03 10:18:08,219-215-global_steps:208800,grad:2.9634,ce_loss:2.2340 +2022-11-03 10:20:18,479-215-global_steps:209200,grad:2.9669,ce_loss:2.2397 +2022-11-03 10:22:29,409-215-global_steps:209600,grad:2.9690,ce_loss:2.2452 +2022-11-03 10:24:39,626-215-global_steps:210000,grad:2.9708,ce_loss:2.2511 +2022-11-03 10:26:20,137-278-epoch:74 +2022-11-03 10:26:51,121-215-global_steps:210400,grad:2.9155,ce_loss:2.1806 +2022-11-03 10:29:01,702-215-global_steps:210800,grad:2.9490,ce_loss:2.2141 +2022-11-03 10:31:13,224-215-global_steps:211200,grad:2.9596,ce_loss:2.2211 +2022-11-03 10:33:23,866-215-global_steps:211600,grad:2.9666,ce_loss:2.2261 +2022-11-03 10:35:34,288-215-global_steps:212000,grad:2.9722,ce_loss:2.2329 +2022-11-03 10:37:44,803-215-global_steps:212400,grad:2.9744,ce_loss:2.2389 +2022-11-03 10:39:55,261-215-global_steps:212800,grad:2.9759,ce_loss:2.2429 +2022-11-03 10:41:49,503-278-epoch:75 +2022-11-03 10:42:07,009-215-global_steps:213200,grad:2.9274,ce_loss:2.1701 +2022-11-03 10:44:15,447-215-global_steps:213600,grad:2.9566,ce_loss:2.1995 +2022-11-03 10:46:23,976-215-global_steps:214000,grad:2.9643,ce_loss:2.2116 +2022-11-03 10:48:33,021-215-global_steps:214400,grad:2.9715,ce_loss:2.2180 +2022-11-03 10:50:41,634-215-global_steps:214800,grad:2.9765,ce_loss:2.2257 +2022-11-03 10:52:49,416-215-global_steps:215200,grad:2.9807,ce_loss:2.2318 +2022-11-03 10:54:57,545-215-global_steps:215600,grad:2.9825,ce_loss:2.2365 +2022-11-03 10:57:03,500-278-epoch:76 +2022-11-03 10:57:07,350-215-global_steps:216000,grad:2.8677,ce_loss:2.1556 +2022-11-03 10:59:15,794-215-global_steps:216400,grad:2.9441,ce_loss:2.1787 +2022-11-03 11:01:23,719-215-global_steps:216800,grad:2.9587,ce_loss:2.1920 +2022-11-03 11:03:31,412-215-global_steps:217200,grad:2.9702,ce_loss:2.2051 +2022-11-03 11:05:39,734-215-global_steps:217600,grad:2.9765,ce_loss:2.2150 +2022-11-03 11:07:48,151-215-global_steps:218000,grad:2.9812,ce_loss:2.2224 +2022-11-03 11:09:55,306-215-global_steps:218400,grad:2.9835,ce_loss:2.2278 +2022-11-03 11:12:02,110-215-global_steps:218800,grad:2.9852,ce_loss:2.2331 +2022-11-03 11:12:13,263-278-epoch:77 +2022-11-03 11:14:10,909-215-global_steps:219200,grad:2.9509,ce_loss:2.1818 +2022-11-03 
11:16:19,029-215-global_steps:219600,grad:2.9649,ce_loss:2.1922 +2022-11-03 11:18:26,599-215-global_steps:220000,grad:2.9730,ce_loss:2.1995 +2022-11-03 11:20:33,933-215-global_steps:220400,grad:2.9798,ce_loss:2.2095 +2022-11-03 11:22:41,211-215-global_steps:220800,grad:2.9833,ce_loss:2.2153 +2022-11-03 11:24:49,307-215-global_steps:221200,grad:2.9863,ce_loss:2.2204 +2022-11-03 11:26:57,219-215-global_steps:221600,grad:2.9878,ce_loss:2.2257 +2022-11-03 11:27:21,772-278-epoch:78 +2022-11-03 11:29:05,924-215-global_steps:222000,grad:2.9469,ce_loss:2.1621 +2022-11-03 11:31:13,019-215-global_steps:222400,grad:2.9673,ce_loss:2.1806 +2022-11-03 11:33:20,720-215-global_steps:222800,grad:2.9745,ce_loss:2.1905 +2022-11-03 11:35:28,822-215-global_steps:223200,grad:2.9799,ce_loss:2.2006 +2022-11-03 11:37:36,404-215-global_steps:223600,grad:2.9842,ce_loss:2.2066 +2022-11-03 11:39:43,716-215-global_steps:224000,grad:2.9876,ce_loss:2.2109 +2022-11-03 11:41:51,234-215-global_steps:224400,grad:2.9884,ce_loss:2.2143 +2022-11-03 11:42:29,246-278-epoch:79 +2022-11-03 11:44:00,599-215-global_steps:224800,grad:2.9572,ce_loss:2.1620 +2022-11-03 11:46:08,336-215-global_steps:225200,grad:2.9687,ce_loss:2.1735 +2022-11-03 11:48:15,633-215-global_steps:225600,grad:2.9803,ce_loss:2.1839 +2022-11-03 11:50:23,337-215-global_steps:226000,grad:2.9861,ce_loss:2.1913 +2022-11-03 11:52:31,224-215-global_steps:226400,grad:2.9898,ce_loss:2.1971 +2022-11-03 11:54:38,945-215-global_steps:226800,grad:2.9911,ce_loss:2.2017 +2022-11-03 11:56:46,108-215-global_steps:227200,grad:2.9931,ce_loss:2.2070 +2022-11-03 11:57:37,047-278-epoch:80 +2022-11-03 11:58:54,602-215-global_steps:227600,grad:2.9545,ce_loss:2.1515 +2022-11-03 12:01:02,389-215-global_steps:228000,grad:2.9657,ce_loss:2.1604 +2022-11-03 12:03:10,125-215-global_steps:228400,grad:2.9786,ce_loss:2.1740 +2022-11-03 12:05:17,306-215-global_steps:228800,grad:2.9855,ce_loss:2.1824 +2022-11-03 12:07:24,790-215-global_steps:229200,grad:2.9890,ce_loss:2.1875 +2022-11-03 12:09:33,108-215-global_steps:229600,grad:2.9920,ce_loss:2.1936 +2022-11-03 12:11:41,591-215-global_steps:230000,grad:2.9925,ce_loss:2.1966 +2022-11-03 12:12:46,457-278-epoch:81 +2022-11-03 12:13:50,779-215-global_steps:230400,grad:2.9679,ce_loss:2.1570 +2022-11-03 12:15:58,325-215-global_steps:230800,grad:2.9783,ce_loss:2.1643 +2022-11-03 12:18:05,926-215-global_steps:231200,grad:2.9863,ce_loss:2.1707 +2022-11-03 12:20:14,487-215-global_steps:231600,grad:2.9906,ce_loss:2.1768 +2022-11-03 12:22:22,788-215-global_steps:232000,grad:2.9927,ce_loss:2.1805 +2022-11-03 12:24:30,269-215-global_steps:232400,grad:2.9946,ce_loss:2.1839 +2022-11-03 12:26:38,075-215-global_steps:232800,grad:2.9974,ce_loss:2.1896 +2022-11-03 12:27:56,848-278-epoch:82 +2022-11-03 12:28:48,305-215-global_steps:233200,grad:2.9780,ce_loss:2.1466 +2022-11-03 12:30:56,788-215-global_steps:233600,grad:2.9809,ce_loss:2.1512 +2022-11-03 12:33:04,982-215-global_steps:234000,grad:2.9890,ce_loss:2.1602 +2022-11-03 12:35:13,005-215-global_steps:234400,grad:2.9969,ce_loss:2.1702 +2022-11-03 12:37:21,579-215-global_steps:234800,grad:2.9990,ce_loss:2.1755 +2022-11-03 12:39:30,147-215-global_steps:235200,grad:3.0024,ce_loss:2.1828 +2022-11-03 12:41:38,430-215-global_steps:235600,grad:3.0040,ce_loss:2.1873 +2022-11-03 12:43:09,998-278-epoch:83 +2022-11-03 12:43:47,349-215-global_steps:236000,grad:2.9536,ce_loss:2.1257 +2022-11-03 12:45:55,436-215-global_steps:236400,grad:2.9691,ce_loss:2.1383 +2022-11-03 
12:48:04,171-215-global_steps:236800,grad:2.9859,ce_loss:2.1519 +2022-11-03 12:50:12,986-215-global_steps:237200,grad:2.9922,ce_loss:2.1599 +2022-11-03 12:52:21,076-215-global_steps:237600,grad:2.9959,ce_loss:2.1659 +2022-11-03 12:54:28,681-215-global_steps:238000,grad:2.9985,ce_loss:2.1707 +2022-11-03 12:56:36,812-215-global_steps:238400,grad:3.0007,ce_loss:2.1757 +2022-11-03 12:58:22,053-278-epoch:84 +2022-11-03 12:58:46,357-215-global_steps:238800,grad:2.9527,ce_loss:2.1222 +2022-11-03 13:00:53,998-215-global_steps:239200,grad:2.9716,ce_loss:2.1293 +2022-11-03 13:03:01,342-215-global_steps:239600,grad:2.9855,ce_loss:2.1399 +2022-11-03 13:05:09,434-215-global_steps:240000,grad:2.9937,ce_loss:2.1484 +2022-11-03 13:07:17,837-215-global_steps:240400,grad:2.9986,ce_loss:2.1562 +2022-11-03 13:09:26,177-215-global_steps:240800,grad:3.0012,ce_loss:2.1615 +2022-11-03 13:11:34,295-215-global_steps:241200,grad:3.0051,ce_loss:2.1672 +2022-11-03 13:13:32,790-278-epoch:85 +2022-11-03 13:13:43,741-215-global_steps:241600,grad:2.9229,ce_loss:2.0841 +2022-11-03 13:15:52,447-215-global_steps:242000,grad:2.9736,ce_loss:2.1148 +2022-11-03 13:18:02,036-215-global_steps:242400,grad:2.9846,ce_loss:2.1285 +2022-11-03 13:20:10,975-215-global_steps:242800,grad:2.9934,ce_loss:2.1372 +2022-11-03 13:22:19,657-215-global_steps:243200,grad:3.0000,ce_loss:2.1455 +2022-11-03 13:24:28,545-215-global_steps:243600,grad:3.0040,ce_loss:2.1518 +2022-11-03 13:26:37,278-215-global_steps:244000,grad:3.0064,ce_loss:2.1576 +2022-11-03 13:28:45,946-215-global_steps:244400,grad:3.0085,ce_loss:2.1633 +2022-11-03 13:28:49,915-278-epoch:86 +2022-11-03 13:30:55,065-215-global_steps:244800,grad:2.9735,ce_loss:2.1080 +2022-11-03 13:33:03,322-215-global_steps:245200,grad:2.9877,ce_loss:2.1206 +2022-11-03 13:35:12,240-215-global_steps:245600,grad:2.9989,ce_loss:2.1335 +2022-11-03 13:37:21,002-215-global_steps:246000,grad:3.0044,ce_loss:2.1412 +2022-11-03 13:39:28,920-215-global_steps:246400,grad:3.0083,ce_loss:2.1478 +2022-11-03 13:41:37,089-215-global_steps:246800,grad:3.0093,ce_loss:2.1512 +2022-11-03 13:43:45,953-215-global_steps:247200,grad:3.0100,ce_loss:2.1543 +2022-11-03 13:44:03,504-278-epoch:87 +2022-11-03 13:45:55,970-215-global_steps:247600,grad:2.9836,ce_loss:2.1172 +2022-11-03 13:48:03,904-215-global_steps:248000,grad:2.9969,ce_loss:2.1270 +2022-11-03 13:50:11,407-215-global_steps:248400,grad:3.0046,ce_loss:2.1336 +2022-11-03 13:52:19,211-215-global_steps:248800,grad:3.0101,ce_loss:2.1393 +2022-11-03 13:54:27,614-215-global_steps:249200,grad:3.0136,ce_loss:2.1432 +2022-11-03 13:56:35,933-215-global_steps:249600,grad:3.0144,ce_loss:2.1470 +2022-11-03 13:58:43,430-215-global_steps:250000,grad:3.0157,ce_loss:2.1515 +2022-11-03 13:59:14,325-278-epoch:88 +2022-11-03 14:00:52,890-215-global_steps:250400,grad:2.9805,ce_loss:2.1033 +2022-11-03 14:03:01,416-215-global_steps:250800,grad:2.9976,ce_loss:2.1153 +2022-11-03 14:05:09,875-215-global_steps:251200,grad:3.0073,ce_loss:2.1264 +2022-11-03 14:07:18,022-215-global_steps:251600,grad:3.0116,ce_loss:2.1318 +2022-11-03 14:09:25,532-215-global_steps:252000,grad:3.0149,ce_loss:2.1367 +2022-11-03 14:11:33,963-215-global_steps:252400,grad:3.0150,ce_loss:2.1396 +2022-11-03 14:13:42,002-215-global_steps:252800,grad:3.0165,ce_loss:2.1444 +2022-11-03 14:14:26,620-278-epoch:89 +2022-11-03 14:15:51,258-215-global_steps:253200,grad:2.9800,ce_loss:2.0964 +2022-11-03 14:17:58,641-215-global_steps:253600,grad:2.9960,ce_loss:2.1050 +2022-11-03 
14:20:06,676-215-global_steps:254000,grad:3.0024,ce_loss:2.1123 +2022-11-03 14:22:15,585-215-global_steps:254400,grad:3.0070,ce_loss:2.1184 +2022-11-03 14:24:23,698-215-global_steps:254800,grad:3.0112,ce_loss:2.1238 +2022-11-03 14:26:31,587-215-global_steps:255200,grad:3.0148,ce_loss:2.1295 +2022-11-03 14:28:39,697-215-global_steps:255600,grad:3.0173,ce_loss:2.1352 +2022-11-03 14:29:37,791-278-epoch:90 +2022-11-03 14:30:49,655-215-global_steps:256000,grad:2.9786,ce_loss:2.0839 +2022-11-03 14:32:58,130-215-global_steps:256400,grad:2.9955,ce_loss:2.0981 +2022-11-03 14:35:05,386-215-global_steps:256800,grad:3.0046,ce_loss:2.1079 +2022-11-03 14:37:12,670-215-global_steps:257200,grad:3.0110,ce_loss:2.1162 +2022-11-03 14:39:20,550-215-global_steps:257600,grad:3.0159,ce_loss:2.1221 +2022-11-03 14:41:28,577-215-global_steps:258000,grad:3.0178,ce_loss:2.1256 +2022-11-03 14:43:36,170-215-global_steps:258400,grad:3.0206,ce_loss:2.1295 +2022-11-03 14:44:46,817-278-epoch:91 +2022-11-03 14:45:44,762-215-global_steps:258800,grad:2.9729,ce_loss:2.0642 +2022-11-03 14:47:52,839-215-global_steps:259200,grad:2.9909,ce_loss:2.0834 +2022-11-03 14:50:00,963-215-global_steps:259600,grad:3.0050,ce_loss:2.0955 +2022-11-03 14:52:08,864-215-global_steps:260000,grad:3.0124,ce_loss:2.1049 +2022-11-03 14:54:16,245-215-global_steps:260400,grad:3.0172,ce_loss:2.1118 +2022-11-03 14:56:24,056-215-global_steps:260800,grad:3.0211,ce_loss:2.1175 +2022-11-03 14:58:32,344-215-global_steps:261200,grad:3.0231,ce_loss:2.1223 +2022-11-03 14:59:57,291-278-epoch:92 +2022-11-03 15:00:41,492-215-global_steps:261600,grad:2.9611,ce_loss:2.0588 +2022-11-03 15:02:49,417-215-global_steps:262000,grad:2.9952,ce_loss:2.0777 +2022-11-03 15:04:56,998-215-global_steps:262400,grad:3.0080,ce_loss:2.0912 +2022-11-03 15:07:05,789-215-global_steps:262800,grad:3.0169,ce_loss:2.1018 +2022-11-03 15:09:13,413-215-global_steps:263200,grad:3.0205,ce_loss:2.1070 +2022-11-03 15:11:20,702-215-global_steps:263600,grad:3.0240,ce_loss:2.1132 +2022-11-03 15:13:27,764-215-global_steps:264000,grad:3.0253,ce_loss:2.1171 +2022-11-03 15:15:05,793-278-epoch:93 +2022-11-03 15:15:36,923-215-global_steps:264400,grad:2.9739,ce_loss:2.0559 +2022-11-03 15:17:45,120-215-global_steps:264800,grad:2.9971,ce_loss:2.0776 +2022-11-03 15:19:53,133-215-global_steps:265200,grad:3.0098,ce_loss:2.0862 +2022-11-03 15:22:00,413-215-global_steps:265600,grad:3.0168,ce_loss:2.0935 +2022-11-03 15:24:08,131-215-global_steps:266000,grad:3.0219,ce_loss:2.0994 +2022-11-03 15:26:16,534-215-global_steps:266400,grad:3.0253,ce_loss:2.1058 +2022-11-03 15:28:24,766-215-global_steps:266800,grad:3.0277,ce_loss:2.1103 +2022-11-03 15:30:16,469-278-epoch:94 +2022-11-03 15:30:34,085-215-global_steps:267200,grad:2.9591,ce_loss:2.0438 +2022-11-03 15:32:42,439-215-global_steps:267600,grad:2.9958,ce_loss:2.0642 +2022-11-03 15:34:51,325-215-global_steps:268000,grad:3.0096,ce_loss:2.0767 +2022-11-03 15:36:59,863-215-global_steps:268400,grad:3.0155,ce_loss:2.0844 +2022-11-03 15:39:07,839-215-global_steps:268800,grad:3.0210,ce_loss:2.0921 +2022-11-03 15:41:16,085-215-global_steps:269200,grad:3.0240,ce_loss:2.0972 +2022-11-03 15:43:24,273-215-global_steps:269600,grad:3.0276,ce_loss:2.1020 +2022-11-03 15:45:29,932-278-epoch:95 +2022-11-03 15:45:34,405-215-global_steps:270000,grad:2.9810,ce_loss:2.0681 +2022-11-03 15:47:42,649-215-global_steps:270400,grad:3.0089,ce_loss:2.0633 +2022-11-03 15:49:50,335-215-global_steps:270800,grad:3.0144,ce_loss:2.0747 +2022-11-03 
15:51:58,521-215-global_steps:271200,grad:3.0225,ce_loss:2.0848 +2022-11-03 15:54:06,853-215-global_steps:271600,grad:3.0252,ce_loss:2.0888 +2022-11-03 15:56:15,554-215-global_steps:272000,grad:3.0283,ce_loss:2.0942 +2022-11-03 15:58:23,250-215-global_steps:272400,grad:3.0307,ce_loss:2.0995 +2022-11-03 16:00:31,386-215-global_steps:272800,grad:3.0323,ce_loss:2.1038 +2022-11-03 16:00:41,961-278-epoch:96 +2022-11-03 16:02:41,545-215-global_steps:273200,grad:2.9950,ce_loss:2.0567 +2022-11-03 16:04:50,166-215-global_steps:273600,grad:3.0148,ce_loss:2.0679 +2022-11-03 16:06:58,347-215-global_steps:274000,grad:3.0230,ce_loss:2.0778 +2022-11-03 16:09:06,093-215-global_steps:274400,grad:3.0271,ce_loss:2.0841 +2022-11-03 16:11:14,476-215-global_steps:274800,grad:3.0300,ce_loss:2.0879 +2022-11-03 16:13:22,969-215-global_steps:275200,grad:3.0331,ce_loss:2.0921 +2022-11-03 16:15:31,588-215-global_steps:275600,grad:3.0353,ce_loss:2.0968 +2022-11-03 16:15:55,645-278-epoch:97 +2022-11-03 16:17:40,564-215-global_steps:276000,grad:2.9970,ce_loss:2.0445 +2022-11-03 16:19:48,774-215-global_steps:276400,grad:3.0091,ce_loss:2.0552 +2022-11-03 16:21:56,969-215-global_steps:276800,grad:3.0199,ce_loss:2.0648 +2022-11-03 16:24:04,969-215-global_steps:277200,grad:3.0248,ce_loss:2.0720 +2022-11-03 16:26:12,583-215-global_steps:277600,grad:3.0307,ce_loss:2.0796 +2022-11-03 16:28:20,601-215-global_steps:278000,grad:3.0333,ce_loss:2.0842 +2022-11-03 16:30:29,405-215-global_steps:278400,grad:3.0355,ce_loss:2.0895 +2022-11-03 16:31:06,803-278-epoch:98 +2022-11-03 16:32:38,919-215-global_steps:278800,grad:3.0047,ce_loss:2.0483 +2022-11-03 16:34:46,640-215-global_steps:279200,grad:3.0191,ce_loss:2.0551 +2022-11-03 16:36:54,053-215-global_steps:279600,grad:3.0254,ce_loss:2.0607 +2022-11-03 16:39:02,260-215-global_steps:280000,grad:3.0294,ce_loss:2.0682 +2022-11-03 16:41:10,703-215-global_steps:280400,grad:3.0337,ce_loss:2.0753 +2022-11-03 16:43:18,631-215-global_steps:280800,grad:3.0370,ce_loss:2.0814 +2022-11-03 16:45:26,421-215-global_steps:281200,grad:3.0395,ce_loss:2.0866 +2022-11-03 16:46:16,924-278-epoch:99 +2022-11-03 16:47:35,912-215-global_steps:281600,grad:2.9867,ce_loss:2.0267 +2022-11-03 16:49:44,646-215-global_steps:282000,grad:3.0033,ce_loss:2.0389 +2022-11-03 16:51:53,189-215-global_steps:282400,grad:3.0165,ce_loss:2.0528 +2022-11-03 16:54:00,911-215-global_steps:282800,grad:3.0237,ce_loss:2.0601 +2022-11-03 16:56:08,773-215-global_steps:283200,grad:3.0306,ce_loss:2.0680 +2022-11-03 16:58:17,695-215-global_steps:283600,grad:3.0346,ce_loss:2.0745 +2022-11-03 17:00:26,232-215-global_steps:284000,grad:3.0375,ce_loss:2.0796 diff --git a/flagged/log.csv b/flagged/log.csv new file mode 100644 index 0000000000000000000000000000000000000000..67c32417de4066f4209595e92d11957d4f435668 --- /dev/null +++ b/flagged/log.csv @@ -0,0 +1,4 @@ +wav,identity,pose,output,flag,username,timestamp +D:\Hugging Face\TalkSHOW\flagged\wav\1st-page19e988n3.wav,Oliver,Sit,,,,2023-04-02 20:56:39.911033 +D:\Hugging Face\TalkSHOW\flagged\wav\1st-pagerljzpu_q.wav,Oliver,Sit,,,,2023-04-02 20:56:45.480908 +D:\Hugging Face\TalkSHOW\flagged\wav\1st-pageduhcakq2.wav,Oliver,Sit,,,,2023-04-02 22:24:23.870159 diff --git a/flagged/wav/1st-page19e988n3.wav b/flagged/wav/1st-page19e988n3.wav new file mode 100644 index 0000000000000000000000000000000000000000..973728edd9c4e0bb2825885501988535aab34a18 Binary files /dev/null and b/flagged/wav/1st-page19e988n3.wav differ diff --git a/flagged/wav/1st-pageduhcakq2.wav b/flagged/wav/1st-pageduhcakq2.wav new 
file mode 100644 index 0000000000000000000000000000000000000000..973728edd9c4e0bb2825885501988535aab34a18 Binary files /dev/null and b/flagged/wav/1st-pageduhcakq2.wav differ diff --git a/flagged/wav/1st-pagerljzpu_q.wav b/flagged/wav/1st-pagerljzpu_q.wav new file mode 100644 index 0000000000000000000000000000000000000000..973728edd9c4e0bb2825885501988535aab34a18 Binary files /dev/null and b/flagged/wav/1st-pagerljzpu_q.wav differ diff --git a/losses/__init__.py b/losses/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..37fdea0eb6c3190e7001567cfe17dc296bf811e8 --- /dev/null +++ b/losses/__init__.py @@ -0,0 +1 @@ +from .losses import * \ No newline at end of file diff --git a/losses/__pycache__/__init__.cpython-37.pyc b/losses/__pycache__/__init__.cpython-37.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b963dec5a4f2167328ad06c5096a0871e8f96b7e Binary files /dev/null and b/losses/__pycache__/__init__.cpython-37.pyc differ diff --git a/losses/__pycache__/losses.cpython-37.pyc b/losses/__pycache__/losses.cpython-37.pyc new file mode 100644 index 0000000000000000000000000000000000000000..af9dd63ad54695e81c37a490beccd23b3b1a7c18 Binary files /dev/null and b/losses/__pycache__/losses.cpython-37.pyc differ diff --git a/losses/losses.py b/losses/losses.py new file mode 100644 index 0000000000000000000000000000000000000000..c4e433ca256b3ed54b77fbc6ca8751aa32959153 --- /dev/null +++ b/losses/losses.py @@ -0,0 +1,91 @@ +import os +import sys + +sys.path.append(os.getcwd()) + +import torch +import torch.nn as nn +import torch.nn.functional as F +import numpy as np + +class KeypointLoss(nn.Module): + def __init__(self): + super(KeypointLoss, self).__init__() + + def forward(self, pred_seq, gt_seq, gt_conf=None): + #pred_seq: (B, C, T) + if gt_conf is not None: + gt_conf = gt_conf >= 0.01 + return F.mse_loss(pred_seq[gt_conf], gt_seq[gt_conf], reduction='mean') + else: + return F.mse_loss(pred_seq, gt_seq) + + +class KLLoss(nn.Module): + def __init__(self, kl_tolerance): + super(KLLoss, self).__init__() + self.kl_tolerance = kl_tolerance + + def forward(self, mu, var, mul=1): + kl_tolerance = self.kl_tolerance * mul * var.shape[1] / 64 + kld_loss = -0.5 * torch.sum(1 + var - mu**2 - var.exp(), dim=1) + # kld_loss = -0.5 * torch.sum(1 + (var-1) - (mu) ** 2 - (var-1).exp(), dim=1) + if self.kl_tolerance is not None: + # above_line = kld_loss[kld_loss > self.kl_tolerance] + # if len(above_line) > 0: + # kld_loss = torch.mean(kld_loss) + # else: + # kld_loss = 0 + kld_loss = torch.where(kld_loss > kl_tolerance, kld_loss, torch.tensor(kl_tolerance, device='cuda')) + # else: + kld_loss = torch.mean(kld_loss) + return kld_loss + + +class L2KLLoss(nn.Module): + def __init__(self, kl_tolerance): + super(L2KLLoss, self).__init__() + self.kl_tolerance = kl_tolerance + + def forward(self, x): + # TODO: check + kld_loss = torch.sum(x ** 2, dim=1) + if self.kl_tolerance is not None: + above_line = kld_loss[kld_loss > self.kl_tolerance] + if len(above_line) > 0: + kld_loss = torch.mean(kld_loss) + else: + kld_loss = 0 + else: + kld_loss = torch.mean(kld_loss) + return kld_loss + +class L2RegLoss(nn.Module): + def __init__(self): + super(L2RegLoss, self).__init__() + + def forward(self, x): + #TODO: check + return torch.sum(x**2) + + +class L2Loss(nn.Module): + def __init__(self): + super(L2Loss, self).__init__() + + def forward(self, x): + # TODO: check + return torch.sum(x ** 2) + + +class AudioLoss(nn.Module): + def __init__(self): + super(AudioLoss, 
self).__init__() + + def forward(self, dynamics, gt_poses): + #pay attention, normalized + mean = torch.mean(gt_poses, dim=-1).unsqueeze(-1) + gt = gt_poses - mean + return F.mse_loss(dynamics, gt) + +L1Loss = nn.L1Loss \ No newline at end of file diff --git a/mesh-master/.gitignore b/mesh-master/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..700162d56aac1ec35b258d425b7ce950b47c89bb --- /dev/null +++ b/mesh-master/.gitignore @@ -0,0 +1,12 @@ +build/* +_build +*.pyc +temporary_test +dist +MANIFEST +*.xml +*.egg-info +doc/build +.eggs +.idea +.noseids diff --git a/mesh-master/CGAL_LICENSE.pdf b/mesh-master/CGAL_LICENSE.pdf new file mode 100644 index 0000000000000000000000000000000000000000..64cb892fb676a5a96a3314e06fa3662545a94f66 Binary files /dev/null and b/mesh-master/CGAL_LICENSE.pdf differ diff --git a/mesh-master/LICENSE.txt b/mesh-master/LICENSE.txt new file mode 100644 index 0000000000000000000000000000000000000000..e48a2b6e5a29ead1bd12a7b4b5dba577a3104c1c --- /dev/null +++ b/mesh-master/LICENSE.txt @@ -0,0 +1,30 @@ + +Max-Planck grants you a non-exclusive, non-transferable, free of charge right to +use the *psbody-mesh package* on computers owned, leased or otherwise controlled +by you and/or your organization for the sole purpose of performing non-commercial +scientific research. + +Any other use, in particular any use for commercial purposes, is prohibited. +This includes, without limitation, incorporation in a commercial product, use in +a commercial service, or production of other artifacts for commercial purposes +including, for example, web services, movies, television programs, or video +games. The Model may not be reproduced, modified and/or made available in any +form to any third party without MPG’s prior written permission. By using the +Model, you agree not to reverse engineer it. + +You expressly acknowledge and agree that the Model is provided “AS IS”, may +contain errors, and that any use of the Model is at your sole risk. MAX PLANCK +MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE DATA, NEITHER +EXPRESS NOR IMPLIED, AND THE ABSENCE OF ANY LEGAL OR ACTUAL DEFECTS, WHETHER +DISCOVERABLE OR NOT. Specifically, and not to limit the foregoing, Max-Planck +makes no representations or warranties (i) regarding the merchantability or +fitness for a particular purpose of the Model, (ii) that the use of the Model +will not infringe any patents, copyrights or other intellectual property rights +of a third party, and (iii) that the use of the Model will not cause any damage +of any kind to you or a third party. + +Under no circumstances shall Max-Planck be liable for any incidental, special, +indirect or consequential damages arising out of or relating to this license, +including but not limited to, any lost profits, business interruption, loss of +programs or other data, or all other commercial damages or losses, even if +advised of the possibility thereof. 
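A note on the `KLLoss` added in `losses/losses.py` above: it computes the per-sample KL divergence between the encoder's Gaussian posterior and a unit Gaussian, scales a tolerance with the latent width (`kl_tolerance * mul * var.shape[1] / 64`), and clamps the per-sample KL from below at that floor before averaging, so the model gains nothing from pushing the KL term under the threshold (a common guard against posterior collapse). The snippet below is a minimal, device-agnostic sketch of that behaviour with assumed shapes and tolerance value; it illustrates the idea and is not the repository code verbatim (which pins the floor tensor to CUDA).

```
import torch

def kl_with_floor(mu, logvar, kl_tolerance=0.02, mul=1):
    # Per-sample KL(N(mu, exp(logvar)) || N(0, 1)), summed over latent dimensions.
    kld = -0.5 * torch.sum(1 + logvar - mu ** 2 - logvar.exp(), dim=1)
    if kl_tolerance is not None:
        # The floor scales with the latent width, mirroring KLLoss (var.shape[1] / 64).
        floor = kl_tolerance * mul * mu.shape[1] / 64
        # Clamping from below zeroes the gradient once the KL drops under the floor.
        kld = torch.clamp(kld, min=floor)
    return kld.mean()

# Example with an assumed batch of 8 latents of width 64.
mu, logvar = torch.randn(8, 64), torch.randn(8, 64)
print(kl_with_floor(mu, logvar))
```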
diff --git a/mesh-master/MANIFEST.in b/mesh-master/MANIFEST.in new file mode 100644 index 0000000000000000000000000000000000000000..2df7f462bdb44c86f61a1f6d79381a9a63ad06cc --- /dev/null +++ b/mesh-master/MANIFEST.in @@ -0,0 +1,2 @@ +recursive-include mesh/src * +recursive-include mesh/ressources * \ No newline at end of file diff --git a/mesh-master/Makefile b/mesh-master/Makefile new file mode 100644 index 0000000000000000000000000000000000000000..f89aaf948c2a039bf63f65ce3531dac300560126 --- /dev/null +++ b/mesh-master/Makefile @@ -0,0 +1,48 @@ +# Makefile for mesh package +package_name := mesh_package + +all: + @echo "\033[0;36m----- [" ${package_name} "] Installing with the interpreter `which python` (version `python --version | cut -d' ' -f2`)\033[0m" + @pip install --upgrade -r requirements.txt && pip list + @pip install --no-deps --install-option="--boost-location=$$BOOST_INCLUDE_DIRS" --verbose --no-cache-dir . + +import_tests: + @echo "\033[0;33m----- [" ${package_name} "] Performing import tests\033[0m" + @PSBODY_MESH_CACHE=`mktemp -d -t mesh_package.XXXXXXXXXX` python -c "from psbody.mesh.mesh import Mesh" + @python -c "from psbody.mesh.meshviewer import MeshViewers" + @echo "\033[0;33m----- [" ${package_name} "] OK import tests\033[0m" + +unit_tests: + @if test "$(USE_NOSE)" = "" ; then \ + echo "\033[0;33m----- [" ${package_name} "] Running tests using unittest, no report file\033[0m" ; \ + PSBODY_MESH_CACHE=`mktemp -d -t mesh_package.XXXXXXXXXX` python -m unittest -v ; \ + else \ + echo "\033[0;33m----- [" ${package_name} "] Running tests using nosetests\033[0m" ; \ + pip install nose ; \ + PSBODY_MESH_CACHE=`mktemp -d -t mesh_package.XXXXXXXXXX` nosetests -v --with-xunit; \ + fi ; + +tests: import_tests unit_tests + +# Creating source distribution +sdist: + @echo "\033[0;33m----- [" ${package_name} "] Creating the source distribution\033[0m" + @python setup.py sdist + +# Creating wheel distribution +wheel: + @echo "\033[0;33m----- [" ${package_name} "] Creating the wheel distribution\033[0m" + @pip install wheel + @python setup.py --verbose build_ext --boost-location=$$BOOST_INCLUDE_DIRS bdist_wheel + +# Build documentation +documentation: + @echo "\033[0;33m----- [" ${package_name} "] Building Sphinx documentation\033[0m" + @pip install -U sphinx sphinx_bootstrap_theme + @cd doc && make html + +clean: + @rm -rf build + @rm -rf dist + @rm -rf psbody_mesh.egg-info + @rm -rf *.xml diff --git a/mesh-master/README.md b/mesh-master/README.md new file mode 100644 index 0000000000000000000000000000000000000000..72103d9eec88ed22277a44e4b0938888871a32eb --- /dev/null +++ b/mesh-master/README.md @@ -0,0 +1,156 @@ +[![Build Status]( + https://raw.githubusercontent.com/MPI-IS-BambooAgent/sw_badges/master/badges/plans/ps-body-mesh/tag.svg?sanitize=true)]( + https://atlas.is.localnet/bamboo/browse/PS-FMP/latest) + +Perceiving Systems Mesh Package +=============================== + +This package contains core functions for manipulating meshes and +visualizing them. It requires ``Python 3.5+`` and is supported on +Linux and macOS operating systems. 
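Before the project list below, a quick orientation on the package API: the `import_tests` target in the Makefile above only checks that `psbody.mesh.mesh.Mesh` and `psbody.mesh.meshviewer.MeshViewers` can be imported. The following is a minimal usage sketch of those two entry points; the `sphere.obj` path and the single-subwindow grid are assumptions chosen for illustration (a sample `sphere.obj` ships under `mesh-master/data/unittest/`).

```
from psbody.mesh.mesh import Mesh
from psbody.mesh.meshviewer import MeshViewers

# Load a polygonal mesh from disk (path assumed for the example).
mesh = Mesh(filename="sphere.obj")

# Open a viewer with a 1x1 grid of subwindows and render the mesh in the first one.
viewers = MeshViewers(shape=(1, 1))
viewers[0][0].set_static_meshes([mesh])
```

The `meshviewer` command-line tool added in `mesh-master/bin/meshviewer` further below drives the same viewer classes from the shell.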
+ +The ``Mesh`` processing libraries support several of our projects such as +* [CoMA: Convolutional Mesh Encoders for Generating 3D Faces](http://coma.is.tue.mpg.de/) +* [FLAME: Learning a model of facial shape and expression from 4D scans](http://flame.is.tue.mpg.de/) +* [MANO: Modeling and Capturing Hands and Bodies Together](http://mano.is.tue.mpg.de/) +* [SMPL: A Skinned Multi-Person Linear Model](http://smpl.is.tue.mpg.de/) +* [VOCA: Voice Operated Character Animation](https://github.com/TimoBolkart/voca) +* [RingNet: 3D Face Shape and Expression Reconstruction from an Image](https://github.com/soubhiksanyal/RingNet) +* [Expressive Body Capture: 3D Hands, Face, and Body from a Single Image](https://smpl-x.is.tue.mpg.de/) + +Requirements +------------ + +You first need to install the `Boost `_ +libraries. You can compile your own local version or simply do on +Linux + +``` +$ sudo apt-get install libboost-dev +``` + +or on macOS + +``` +$ brew install boost +``` + +Installation +------------ + +First, create a dedicated Python virtual environment and activate it: + +``` +$ python3 -m venv --copies my_venv +$ source my_venv/bin/activate +``` + +You should then compile and install the ``psbody-mesh`` package easily +using the Makefile: + +``` +$ BOOST_INCLUDE_DIRS=/path/to/boost/include make all +``` + +Testing +------- + +To run the tests, simply do: + +``` +$ make tests +``` + +Documentation +------------- + +A detailed documentation can be compiled using the Makefile: + +``` +$ make documentation +``` + +Viewing the Meshes +------------------ + +Starting from version 0.4 meshviewer ships with `meshviewer` -- a +program that allows you to display polygonal meshes produced by `mesh` +package. + +### Viewing a mesh on a local machine + +The most straightforward use-case is viewing the mesh on the same +machine where it is stored. To do this simply run + +``` +$ meshviewer view sphere.obj +``` + +This will create an interactive window with your mesh rendering. You +can render more than one mesh in the same window by passing several +paths to `view` command + +``` +$ meshviewer view sphere.obj cylinder.obj +``` + +This will arrange the subplots horizontally in a row. If you want a +grid arrangement, you can specify the grid parameters explicitly + +``` +$ meshviewer view -nx 2 -ny 2 *.obj +``` + +### Viewing a mesh from a remote machine + +It is also possible to view a mesh stored on a remote machine. To do +this you need mesh to be installed on both the local and the remote +machines. You start by opening an empty viewer window listening on a +network port + +``` +(local) $ meshviewer open --port 3000 +``` + +To stream a shape to this viewer you have to either pick a port that +is visible from the remote machine or by manually exposing the port +when connecting. For example, through SSH port forwarding + +``` +(local) $ ssh -R 3000:127.0.0.1:3000 user@host +``` + +Then on a remote machine you use `view` command pointing to the +locally forwarded port + +``` +(remote) $ meshviewer view -p 3000 sphere.obj +``` + +This should display the remote mesh on your local viewer. In case it +does not it might be caused by the network connection being closed +before the mesh could be sent. 
To work around this one can try +increasing the timeout up to 1 second + +``` +(remote) $ meshviewer view -p 3000 --timeout 1 sphere.obj +``` + +To take a snapshot you should locally run a `snap` command + +``` +(local) $ meshviewer snap -p 3000 sphere.png +``` + +License +------- + +Please refer for LICENSE.txt for using this software. The software is +compiled using CGAL sources following the license in CGAL_LICENSE.pdf + +Acknowledgments +--------------- + +We thank the external contribution from the following people: +* [Kenneth Chaney](https://github.com/k-chaney) ([PR #5](https://github.com/MPI-IS/mesh/pull/5)) +* [Dávid Komorowicz](https://github.com/Dawars) ([PR #8](https://github.com/MPI-IS/mesh/pull/8)) diff --git a/mesh-master/bin/meshviewer b/mesh-master/bin/meshviewer new file mode 100644 index 0000000000000000000000000000000000000000..968fba321fc9a4e9684d80973cded40736ab7f22 --- /dev/null +++ b/mesh-master/bin/meshviewer @@ -0,0 +1,379 @@ +#!/usr/bin/env python3 + + +import textwrap + +__doc__ = textwrap.dedent( + """ + `meshviewer` is a program that allows you to display polygonal + meshes produced by `mesh` package. + + Viewing a mesh on a local machine + --------------------------------- + + The most straightforward use-case is viewing the mesh on the same + machine where it is stored. To do this simply run + + ``` + $ meshviewer view sphere.obj + ``` + + This will create an interactive window with your mesh rendering. + You can render more than one mesh in the same window by passing + several paths to `view` command + + ``` + $ meshviewer view sphere.obj cylinder.obj + ``` + + This will arrange the subplots horizontally in a row. If you want + a grid arrangement, you can specify the grid parameters explicitly + + ``` + $ meshviewer view -nx 2 -ny 2 *.obj + ``` + + Viewing a mesh from a remote machine + ------------------------------------ + + It is also possible to view a mesh stored on a remote machine. To + do this you need mesh to be installed on both the local and the + remote machines. You start by opening an empty viewer window + listening on a network port + + ``` + (local) $ meshviewer open --port 3000 + ``` + + To stream a shape to this viewer you have to either pick a port + that is visible from the remote machine or by manually exposing + the port when connecting. For example, through SSH port + forwarding + + ``` + (local) $ ssh -R 3000:127.0.0.1:3000 user@host + ``` + + Then on a remote machine you use `view` command pointing to the + locally forwarded port + + ``` + (remote) $ meshviewer view -p 3000 sphere.obj + ``` + + This should display the remote mesh on your local viewer. In case it + does not it might be caused by the network connection being closed + before the mesh could be sent. 
To work around this one can try + increasing the timeout up to 1 second + + ``` + (remote) $ meshviewer view -p 3000 --timeout 1 sphere.obj + ``` + + To take a snapshot you should locally run a `snap` command + + ``` + (local) $ meshviewer snap -p 3000 sphere.png + ``` + """) + + +import argparse +import logging +import sys +import time + +from psbody.mesh.mesh import Mesh +from psbody.mesh.meshviewer import ( + MESH_VIEWER_DEFAULT_TITLE, + MESH_VIEWER_DEFAULT_SHAPE, + MESH_VIEWER_DEFAULT_WIDTH, + MESH_VIEWER_DEFAULT_HEIGHT, + ZMQ_HOST, + MeshViewerLocal, + MeshViewerRemote) + + +logging.basicConfig(level=logging.INFO) + + +parser_root = argparse.ArgumentParser( + add_help=False, + description="View the polygonal meshes, locally and across the network", + epilog=__doc__, + formatter_class=argparse.RawTextHelpFormatter) + +subparsers = parser_root.add_subparsers(dest="command") +subparsers.required = True + +parser_open = subparsers.add_parser("open", add_help=False) +parser_open.add_argument( + "-p", "--port", + help="local port to listen for incoming commands", + type=int) + +parser_view = subparsers.add_parser("view", add_help=False) +parser_view.add_argument( + "-h", "--host", + help="remote host", + metavar="HOSTNAME", + type=str) +parser_view.add_argument( + "-p", "--port", + help="remote port", + type=int) +parser_view.add_argument( + "-ix", "--subwindow-index-horizontal", + help="horizontal index of the target subwindow", + metavar="INDEX", + type=int) +parser_view.add_argument( + "-iy", "--subwindow-index-vertical", + help="vertical index of the target subwindow", + metavar="INDEX", + type=int) +parser_view.add_argument( + "--timeout", + help="wait for some time after sending the mesh to let it render", + metavar="SECONDS", + type=float, + default=0.5) +parser_view.add_argument( + "filename", + help="path to the mesh file", + type=str, + nargs="+") + +for parser in parser_open, parser_view: + window_options = parser.add_argument_group("window options") + window_options.add_argument( + "-t", "--title", + help="window title", + type=str) + window_options.add_argument( + "-ww", "-wx", "--window-width", + help="window width in pixels", + metavar="PIXELS", + type=int) + window_options.add_argument( + "-wh", "-wy", "--window-height", + help="window height in pixels", + metavar="PIXELS", + type=int) + window_options.add_argument( + "-nx", "--subwindow-number-horizontal", + help="number of horizontal subwindows", + metavar="NUMBER", + type=int) + window_options.add_argument( + "-ny", "--subwindow-number-vertical", + help="number of vertical subwindows", + metavar="NUMBER", + type=int) + +parser_snap = subparsers.add_parser("snap", add_help=False) +parser_snap.add_argument( + "-h", "--host", + help="remote host", + type=str) +parser_snap.add_argument( + "-p", "--port", + help="remote port", + type=int, + required=True) +parser_snap.add_argument( + "filename", + help="path to the output snapshot", + type=str) + + +for p in parser_root, parser_open, parser_view, parser_snap: + p.add_argument("--help", action="help") + + +def dispatch_command(args): + """ + Performs a sanity check of the passed arguments and then + dispatches the appropriate command. 
+ """ + + if args.command == "open": + start_server(args) + return + + if not args.port: + client = start_local_client(args) + else: + client = start_remote_client(args) + + if args.command == "snap": + take_snapshot(client, args) + + if args.command == "view": + if args.port is not None: + # Below is a list of contradicting settings: it futile to + # try to change the parameters of a mesh viewer already + # running on a remote machine. + if args.title is not None: + logging.warning( + "--title is ignored when working with remote viewer") + + if args.window_width is not None: + logging.warning( + "--window-width is ignored when working with remote viewer") + + if args.window_height is not None: + logging.warning( + "--window-height is ignored when working with remote viewer") + + if args.subwindow_number_horizontal is not None: + logging.warning( + "--subwindow-number-horizontal is ignored when working " + "with remote viewer") + + if args.subwindow_number_vertical is not None: + logging.warning( + "--subwindow-number-vertical is ignored when working " + "with remote viewer") + + # This one is a bit different: while it should be + # technically possible to stream the mesh in a specific + # subwindow, we currently don't support that. + if ( + args.subwindow_index_horizontal is not None or + args.subwindow_index_vertical is not None + ): + logging.warning( + "unfortunately, drawing to a specific subwindow is not " + "supported when working with remote viewer and the first " + "subwindow is going to be used instead") + + if ( + args.subwindow_index_horizontal is not None and + args.subwindow_index_vertical is None + ) or ( + args.subwindow_index_horizontal is None and + args.subwindow_index_vertical is not None + ): + logging.fatal( + "you have to specify both horizontal " + "and vertical subwindow incides") + return + + if ( + args.subwindow_index_horizontal is not None and + args.subwindow_index_vertical is not None + ): + display_single_subwindow(client, args) + else: + display_multi_subwindows(client, args) + + # Basically, wait for send_pyobj() to actually send everything + # before terminating. + time.sleep(args.timeout) + + +def start_server(args): + """ + Starts a meshviewer window on a local machine. + + This function opens a mesh viewer window that listens for command + on a given port. + """ + server = MeshViewerRemote( + titlebar=args.title or MESH_VIEWER_DEFAULT_TITLE, + subwins_vert=args.subwindow_number_vertical or MESH_VIEWER_DEFAULT_SHAPE[1], + subwins_horz=args.subwindow_number_horizontal or MESH_VIEWER_DEFAULT_SHAPE[0], + width=args.window_width or MESH_VIEWER_DEFAULT_WIDTH, + height=args.window_height or MESH_VIEWER_DEFAULT_HEIGHT, + port=args.port) + return server + + +def start_local_client(args): + """ + Starts a local meshviewer not connected to anywhere. + + This function internally opens a mesh viewer window listening on a + random port. + """ + client = MeshViewerLocal( + titlebar=args.title or MESH_VIEWER_DEFAULT_TITLE, + window_width=args.window_width or MESH_VIEWER_DEFAULT_WIDTH, + window_height=args.window_height or MESH_VIEWER_DEFAULT_HEIGHT, + shape=( + args.subwindow_number_vertical or 1, + args.subwindow_number_horizontal or len(args.filename), + ), + keepalive=True) + return client + + +def start_remote_client(args): + """ + Starts a meshviewer client connected to a remote machine. + + This function does not create a new window, but is necessary to + stream the mesh to a remote viewer. 
+ """ + client = MeshViewerLocal( + host=args.host or ZMQ_HOST, + port=args.port) + return client + + +def display_single_subwindow(client, args): + """ + Displays a single mesh in a given subwindow. + """ + ix = args.subwindow_index_horizontal + iy = args.subwindow_index_vertical + + try: + subwindow = client.get_subwindows()[iy][ix] + except IndexError: + logging.fatal( + "cannot find subwindow ({}, {}). " + "The current viewer shape is {}x{} subwindows, " + "indexing is zero-based." + .format(ix, iy, *client.shape)) + return + + meshes = [Mesh(filename=filename) for filename in args.filename] + subwindow.set_static_meshes(meshes) + + +def display_multi_subwindows(client, args): + """ + Displays a list of meshes. One mesh per subwindow. + """ + grid = client.get_subwindows() + + subwindows = [ + subwindow + for row in grid + for subwindow in row + ] + + if len(subwindows) < len(args.filename): + logging.warning( + "cannot display {0} meshes in {1} subwindows. " + "Taking the first {1}.".format( + len(args.filename), len(subwindows))) + + for subwindow, filename in zip(subwindows, args.filename): + mesh = Mesh(filename=filename) + subwindow.set_static_meshes([mesh]) + + +def take_snapshot(client, args): + """ + Take snapshot and dump it into a file. + """ + client.save_snapshot(args.filename) + + +if __name__ == "__main__": + args = parser_root.parse_args() + dispatch_command(args) + sys.exit(0) diff --git a/mesh-master/data/unittest/cylinder.obj b/mesh-master/data/unittest/cylinder.obj new file mode 100644 index 0000000000000000000000000000000000000000..70c26ebf185b6addd4a9061bfb3e25c5f902af08 --- /dev/null +++ b/mesh-master/data/unittest/cylinder.obj @@ -0,0 +1,38 @@ +# Blender v2.61 (sub 0) OBJ File: '' +# www.blender.org +g Cylinder +v 0.000000 -1.000000 -1.000000 +v 0.000000 -1.000000 1.000000 +v -0.382683 -1.000000 0.923880 +v -0.707107 -1.000000 0.707107 +v -0.923880 -1.000000 0.382684 +v -1.000000 -1.000000 -0.000000 +v -0.923879 -1.000000 -0.382684 +v -0.707107 -1.000000 -0.707107 +v -0.382683 -1.000000 -0.923880 +v 0.000001 1.000000 -1.000000 +v -0.000002 1.000000 1.000000 +v -0.382685 1.000000 0.923879 +v -0.707108 1.000000 0.707105 +v -0.923880 1.000000 0.382681 +v -1.000000 1.000000 -0.000003 +v -0.923878 1.000000 -0.382686 +v -0.707105 1.000000 -0.707109 +v -0.382681 1.000000 -0.923881 +s off +f 10 1 18 +f 1 9 18 +f 8 17 9 +f 17 18 9 +f 7 16 8 +f 16 17 8 +f 6 15 7 +f 15 16 7 +f 5 14 6 +f 14 15 6 +f 4 13 5 +f 13 14 5 +f 3 12 4 +f 12 13 4 +f 2 11 3 +f 11 12 3 diff --git a/mesh-master/data/unittest/cylinder_trans.obj b/mesh-master/data/unittest/cylinder_trans.obj new file mode 100644 index 0000000000000000000000000000000000000000..f15d79ffb1036c28e0db38cbfde55e1c104b3f3b --- /dev/null +++ b/mesh-master/data/unittest/cylinder_trans.obj @@ -0,0 +1,38 @@ +# Blender v2.61 (sub 0) OBJ File: '' +# www.blender.org +g Cylinder +v 1.057678 -1.000000 -1.000000 +v 1.057678 -1.000000 1.000000 +v 0.674994 -1.000000 0.923880 +v 0.350571 -1.000000 0.707107 +v 0.133798 -1.000000 0.382684 +v 0.057678 -1.000000 -0.000000 +v 0.133798 -1.000000 -0.382684 +v 0.350571 -1.000000 -0.707107 +v 0.674995 -1.000000 -0.923880 +v 1.057678 1.000000 -1.000000 +v 1.057676 1.000000 1.000000 +v 0.674992 1.000000 0.923879 +v 0.350569 1.000000 0.707105 +v 0.133797 1.000000 0.382681 +v 0.057678 1.000000 -0.000003 +v 0.133799 1.000000 -0.382686 +v 0.350573 1.000000 -0.707109 +v 0.674997 1.000000 -0.923881 +s off +f 10 1 18 +f 1 9 18 +f 8 17 9 +f 17 18 9 +f 7 16 8 +f 16 17 8 +f 6 15 7 +f 15 16 7 +f 5 14 6 +f 14 15 6 
+f 4 13 5 +f 13 14 5 +f 3 12 4 +f 12 13 4 +f 2 11 3 +f 11 12 3 diff --git a/mesh-master/data/unittest/self_intersecting_cyl.obj b/mesh-master/data/unittest/self_intersecting_cyl.obj new file mode 100644 index 0000000000000000000000000000000000000000..e5044b282bdf5e84b309eb5248b992c274eb0e70 --- /dev/null +++ b/mesh-master/data/unittest/self_intersecting_cyl.obj @@ -0,0 +1,46 @@ +# Blender v2.61 (sub 0) OBJ File: '' +# www.blender.org +g Plane +v 0.000000 -0.500000 -1.000000 +v 0.707107 -0.500000 -0.707107 +v 1.000000 -0.500000 0.000000 +v 0.707107 -0.500000 0.707107 +v -0.000000 -0.500000 1.000000 +v -0.707107 -0.500000 0.707107 +v -1.000000 -0.500000 -0.000000 +v -0.707107 -0.500000 -0.707107 +v -0.000000 0.500000 -1.000000 +v 0.707106 0.500000 -0.707107 +v 1.000000 0.500000 -0.000001 +v 0.707107 0.500000 0.707107 +v -0.000000 0.500000 1.000000 +v -0.707107 0.500000 0.707107 +v -1.000000 0.500000 -0.000001 +v -0.707106 0.500000 -0.707107 +v 0.000000 -0.500000 0.000000 +v 0.000000 -0.835754 0.000000 +s off +f 17 1 2 +f 18 10 9 +f 17 2 3 +f 18 11 10 +f 17 3 4 +f 18 12 11 +f 17 4 5 +f 18 13 12 +f 17 5 6 +f 18 14 13 +f 17 6 7 +f 18 15 14 +f 17 7 8 +f 18 16 15 +f 8 1 17 +f 18 9 16 +f 1 9 10 2 +f 2 10 11 3 +f 3 11 12 4 +f 4 12 13 5 +f 5 13 14 6 +f 6 14 15 7 +f 7 15 16 8 +f 9 1 8 16 diff --git a/mesh-master/data/unittest/sphere.obj b/mesh-master/data/unittest/sphere.obj new file mode 100644 index 0000000000000000000000000000000000000000..393b95f98215a85135139e20a61832e21df5ae0b --- /dev/null +++ b/mesh-master/data/unittest/sphere.obj @@ -0,0 +1,1278 @@ +#### +# +# OBJ File Generated by Meshlab +# +#### +# Object sphere.obj +# +# Vertices: 422 +# Faces: 840 +# +#### +v 0.000000 0.000000 -127.000000 +v 5.000000 25.000000 -125.000000 +v 10.000000 24.000000 -125.000000 +v 15.000000 21.000000 -125.000000 +v 19.000000 17.000000 -125.000000 +v 22.000000 13.000000 -125.000000 +v 25.000000 8.000000 -125.000000 +v 26.000000 2.000000 -125.000000 +v 26.000000 -3.000000 -125.000000 +v 25.000000 -9.000000 -125.000000 +v 22.000000 -14.000000 -125.000000 +v 19.000000 -18.000000 -125.000000 +v 15.000000 -22.000000 -125.000000 +v 10.000000 -25.000000 -125.000000 +v 5.000000 -26.000000 -125.000000 +v 0.000000 -27.000000 -125.000000 +v -6.000000 -26.000000 -125.000000 +v -11.000000 -25.000000 -125.000000 +v -16.000000 -22.000000 -125.000000 +v -20.000000 -18.000000 -125.000000 +v -23.000000 -14.000000 -125.000000 +v -26.000000 -9.000000 -125.000000 +v -27.000000 -3.000000 -125.000000 +v -27.000000 2.000000 -125.000000 +v -26.000000 8.000000 -125.000000 +v -23.000000 13.000000 -125.000000 +v -20.000000 17.000000 -125.000000 +v -16.000000 21.000000 -125.000000 +v -11.000000 24.000000 -125.000000 +v -6.000000 25.000000 -125.000000 +v -1.000000 26.000000 -125.000000 +v 10.000000 50.000000 -117.000000 +v 21.000000 47.000000 -117.000000 +v 30.000000 41.000000 -117.000000 +v 38.000000 34.000000 -117.000000 +v 44.000000 25.000000 -117.000000 +v 49.000000 15.000000 -117.000000 +v 51.000000 5.000000 -117.000000 +v 51.000000 -6.000000 -117.000000 +v 49.000000 -16.000000 -117.000000 +v 44.000000 -26.000000 -117.000000 +v 38.000000 -35.000000 -117.000000 +v 30.000000 -42.000000 -117.000000 +v 21.000000 -48.000000 -117.000000 +v 10.000000 -51.000000 -117.000000 +v 0.000000 -52.000000 -117.000000 +v -11.000000 -51.000000 -117.000000 +v -22.000000 -48.000000 -117.000000 +v -31.000000 -42.000000 -117.000000 +v -39.000000 -35.000000 -117.000000 +v -45.000000 -26.000000 -117.000000 +v -50.000000 -16.000000 -117.000000 +v -52.000000 
-6.000000 -117.000000 +v -52.000000 5.000000 -117.000000 +v -50.000000 15.000000 -117.000000 +v -45.000000 25.000000 -117.000000 +v -39.000000 34.000000 -117.000000 +v -31.000000 41.000000 -117.000000 +v -22.000000 47.000000 -117.000000 +v -11.000000 50.000000 -117.000000 +v -1.000000 51.000000 -117.000000 +v 15.000000 73.000000 -103.000000 +v 30.000000 68.000000 -103.000000 +v 43.000000 60.000000 -103.000000 +v 55.000000 49.000000 -103.000000 +v 64.000000 37.000000 -103.000000 +v 70.000000 23.000000 -103.000000 +v 74.000000 7.000000 -103.000000 +v 74.000000 -8.000000 -103.000000 +v 70.000000 -24.000000 -103.000000 +v 64.000000 -38.000000 -103.000000 +v 55.000000 -50.000000 -103.000000 +v 43.000000 -61.000000 -103.000000 +v 30.000000 -69.000000 -103.000000 +v 15.000000 -74.000000 -103.000000 +v 0.000000 -75.000000 -103.000000 +v -16.000000 -74.000000 -103.000000 +v -31.000000 -69.000000 -103.000000 +v -44.000000 -61.000000 -103.000000 +v -56.000000 -50.000000 -103.000000 +v -65.000000 -38.000000 -103.000000 +v -71.000000 -24.000000 -103.000000 +v -75.000000 -8.000000 -103.000000 +v -75.000000 7.000000 -103.000000 +v -71.000000 23.000000 -103.000000 +v -65.000000 37.000000 -103.000000 +v -56.000000 49.000000 -103.000000 +v -44.000000 60.000000 -103.000000 +v -31.000000 68.000000 -103.000000 +v -16.000000 73.000000 -103.000000 +v -1.000000 74.000000 -103.000000 +v 19.000000 92.000000 -85.000000 +v 38.000000 86.000000 -85.000000 +v 55.000000 76.000000 -85.000000 +v 70.000000 63.000000 -85.000000 +v 81.000000 47.000000 -85.000000 +v 89.000000 29.000000 -85.000000 +v 93.000000 9.000000 -85.000000 +v 93.000000 -10.000000 -85.000000 +v 89.000000 -30.000000 -85.000000 +v 81.000000 -48.000000 -85.000000 +v 70.000000 -64.000000 -85.000000 +v 55.000000 -77.000000 -85.000000 +v 38.000000 -87.000000 -85.000000 +v 19.000000 -93.000000 -85.000000 +v 0.000000 -95.000000 -85.000000 +v -20.000000 -93.000000 -85.000000 +v -39.000000 -87.000000 -85.000000 +v -56.000000 -77.000000 -85.000000 +v -71.000000 -64.000000 -85.000000 +v -82.000000 -48.000000 -85.000000 +v -90.000000 -30.000000 -85.000000 +v -94.000000 -10.000000 -85.000000 +v -94.000000 9.000000 -85.000000 +v -90.000000 29.000000 -85.000000 +v -82.000000 47.000000 -85.000000 +v -71.000000 63.000000 -85.000000 +v -56.000000 76.000000 -85.000000 +v -39.000000 86.000000 -85.000000 +v -20.000000 92.000000 -85.000000 +v -1.000000 94.000000 -85.000000 +v 22.000000 107.000000 -64.000000 +v 44.000000 100.000000 -64.000000 +v 64.000000 88.000000 -64.000000 +v 81.000000 73.000000 -64.000000 +v 95.000000 54.000000 -64.000000 +v 104.000000 33.000000 -64.000000 +v 109.000000 11.000000 -64.000000 +v 109.000000 -12.000000 -64.000000 +v 104.000000 -34.000000 -64.000000 +v 95.000000 -55.000000 -64.000000 +v 81.000000 -74.000000 -64.000000 +v 64.000000 -89.000000 -64.000000 +v 44.000000 -101.000000 -64.000000 +v 22.000000 -108.000000 -64.000000 +v 0.000000 -110.000000 -64.000000 +v -23.000000 -108.000000 -64.000000 +v -45.000000 -101.000000 -64.000000 +v -65.000000 -89.000000 -64.000000 +v -82.000000 -74.000000 -64.000000 +v -96.000000 -55.000000 -64.000000 +v -105.000000 -34.000000 -64.000000 +v -110.000000 -12.000000 -64.000000 +v -110.000000 11.000000 -64.000000 +v -105.000000 33.000000 -64.000000 +v -96.000000 54.000000 -64.000000 +v -82.000000 73.000000 -64.000000 +v -65.000000 88.000000 -64.000000 +v -45.000000 100.000000 -64.000000 +v -23.000000 107.000000 -64.000000 +v -1.000000 109.000000 -64.000000 +v 25.000000 118.000000 -40.000000 +v 49.000000 110.000000 
-40.000000 +v 70.000000 97.000000 -40.000000 +v 89.000000 80.000000 -40.000000 +v 104.000000 60.000000 -40.000000 +v 114.000000 37.000000 -40.000000 +v 120.000000 12.000000 -40.000000 +v 120.000000 -13.000000 -40.000000 +v 114.000000 -38.000000 -40.000000 +v 104.000000 -61.000000 -40.000000 +v 89.000000 -81.000000 -40.000000 +v 70.000000 -98.000000 -40.000000 +v 49.000000 -111.000000 -40.000000 +v 25.000000 -119.000000 -40.000000 +v 0.000000 -121.000000 -40.000000 +v -26.000000 -119.000000 -40.000000 +v -50.000000 -111.000000 -40.000000 +v -71.000000 -98.000000 -40.000000 +v -90.000000 -81.000000 -40.000000 +v -105.000000 -61.000000 -40.000000 +v -115.000000 -38.000000 -40.000000 +v -121.000000 -13.000000 -40.000000 +v -121.000000 12.000000 -40.000000 +v -115.000000 37.000000 -40.000000 +v -105.000000 60.000000 -40.000000 +v -90.000000 80.000000 -40.000000 +v -71.000000 97.000000 -40.000000 +v -50.000000 110.000000 -40.000000 +v -26.000000 118.000000 -40.000000 +v -1.000000 120.000000 -40.000000 +v 26.000000 123.000000 -14.000000 +v 51.000000 115.000000 -14.000000 +v 74.000000 102.000000 -14.000000 +v 93.000000 84.000000 -14.000000 +v 109.000000 63.000000 -14.000000 +v 120.000000 39.000000 -14.000000 +v 125.000000 13.000000 -14.000000 +v 125.000000 -14.000000 -14.000000 +v 120.000000 -40.000000 -14.000000 +v 109.000000 -64.000000 -14.000000 +v 93.000000 -85.000000 -14.000000 +v 74.000000 -103.000000 -14.000000 +v 51.000000 -116.000000 -14.000000 +v 26.000000 -124.000000 -14.000000 +v 0.000000 -127.000000 -14.000000 +v -27.000000 -124.000000 -14.000000 +v -52.000000 -116.000000 -14.000000 +v -75.000000 -103.000000 -14.000000 +v -94.000000 -85.000000 -14.000000 +v -110.000000 -64.000000 -14.000000 +v -121.000000 -40.000000 -14.000000 +v -126.000000 -14.000000 -14.000000 +v -126.000000 13.000000 -14.000000 +v -121.000000 39.000000 -14.000000 +v -110.000000 63.000000 -14.000000 +v -94.000000 84.000000 -14.000000 +v -75.000000 102.000000 -14.000000 +v -52.000000 115.000000 -14.000000 +v -27.000000 123.000000 -14.000000 +v -1.000000 126.000000 -14.000000 +v 26.000000 123.000000 13.000000 +v 51.000000 115.000000 13.000000 +v 74.000000 102.000000 13.000000 +v 93.000000 84.000000 13.000000 +v 109.000000 63.000000 13.000000 +v 120.000000 39.000000 13.000000 +v 125.000000 13.000000 13.000000 +v 125.000000 -14.000000 13.000000 +v 120.000000 -40.000000 13.000000 +v 109.000000 -64.000000 13.000000 +v 93.000000 -85.000000 13.000000 +v 74.000000 -103.000000 13.000000 +v 51.000000 -116.000000 13.000000 +v 26.000000 -124.000000 13.000000 +v 0.000000 -127.000000 13.000000 +v -27.000000 -124.000000 13.000000 +v -52.000000 -116.000000 13.000000 +v -75.000000 -103.000000 13.000000 +v -94.000000 -85.000000 13.000000 +v -110.000000 -64.000000 13.000000 +v -121.000000 -40.000000 13.000000 +v -126.000000 -14.000000 13.000000 +v -126.000000 13.000000 13.000000 +v -121.000000 39.000000 13.000000 +v -110.000000 63.000000 13.000000 +v -94.000000 84.000000 13.000000 +v -75.000000 102.000000 13.000000 +v -52.000000 115.000000 13.000000 +v -27.000000 123.000000 13.000000 +v -1.000000 126.000000 13.000000 +v 25.000000 118.000000 39.000000 +v 49.000000 110.000000 39.000000 +v 70.000000 97.000000 39.000000 +v 89.000000 80.000000 39.000000 +v 104.000000 60.000000 39.000000 +v 114.000000 37.000000 39.000000 +v 120.000000 12.000000 39.000000 +v 120.000000 -13.000000 39.000000 +v 114.000000 -38.000000 39.000000 +v 104.000000 -61.000000 39.000000 +v 89.000000 -81.000000 39.000000 +v 70.000000 -98.000000 39.000000 +v 49.000000 
-111.000000 39.000000 +v 25.000000 -119.000000 39.000000 +v 0.000000 -121.000000 39.000000 +v -26.000000 -119.000000 39.000000 +v -50.000000 -111.000000 39.000000 +v -71.000000 -98.000000 39.000000 +v -90.000000 -81.000000 39.000000 +v -105.000000 -61.000000 39.000000 +v -115.000000 -38.000000 39.000000 +v -121.000000 -13.000000 39.000000 +v -121.000000 12.000000 39.000000 +v -115.000000 37.000000 39.000000 +v -105.000000 60.000000 39.000000 +v -90.000000 80.000000 39.000000 +v -71.000000 97.000000 39.000000 +v -50.000000 110.000000 39.000000 +v -26.000000 118.000000 39.000000 +v -1.000000 120.000000 39.000000 +v 22.000000 107.000000 63.000000 +v 44.000000 100.000000 63.000000 +v 64.000000 88.000000 63.000000 +v 81.000000 73.000000 63.000000 +v 95.000000 54.000000 63.000000 +v 104.000000 33.000000 63.000000 +v 109.000000 11.000000 63.000000 +v 109.000000 -12.000000 63.000000 +v 104.000000 -34.000000 63.000000 +v 95.000000 -55.000000 63.000000 +v 81.000000 -74.000000 63.000000 +v 64.000000 -89.000000 63.000000 +v 44.000000 -101.000000 63.000000 +v 22.000000 -108.000000 63.000000 +v 0.000000 -110.000000 63.000000 +v -23.000000 -108.000000 63.000000 +v -45.000000 -101.000000 63.000000 +v -65.000000 -89.000000 63.000000 +v -82.000000 -74.000000 63.000000 +v -96.000000 -55.000000 63.000000 +v -105.000000 -34.000000 63.000000 +v -110.000000 -12.000000 63.000000 +v -110.000000 11.000000 63.000000 +v -105.000000 33.000000 63.000000 +v -96.000000 54.000000 63.000000 +v -82.000000 73.000000 63.000000 +v -65.000000 88.000000 63.000000 +v -45.000000 100.000000 63.000000 +v -23.000000 107.000000 63.000000 +v -1.000000 109.000000 63.000000 +v 19.000000 92.000000 84.000000 +v 38.000000 86.000000 84.000000 +v 55.000000 76.000000 84.000000 +v 70.000000 63.000000 84.000000 +v 81.000000 47.000000 84.000000 +v 89.000000 29.000000 84.000000 +v 93.000000 9.000000 84.000000 +v 93.000000 -10.000000 84.000000 +v 89.000000 -30.000000 84.000000 +v 81.000000 -48.000000 84.000000 +v 70.000000 -64.000000 84.000000 +v 55.000000 -77.000000 84.000000 +v 38.000000 -87.000000 84.000000 +v 19.000000 -93.000000 84.000000 +v 0.000000 -95.000000 84.000000 +v -20.000000 -93.000000 84.000000 +v -39.000000 -87.000000 84.000000 +v -56.000000 -77.000000 84.000000 +v -71.000000 -64.000000 84.000000 +v -82.000000 -48.000000 84.000000 +v -90.000000 -30.000000 84.000000 +v -94.000000 -10.000000 84.000000 +v -94.000000 9.000000 84.000000 +v -90.000000 29.000000 84.000000 +v -82.000000 47.000000 84.000000 +v -71.000000 63.000000 84.000000 +v -56.000000 76.000000 84.000000 +v -39.000000 86.000000 84.000000 +v -20.000000 92.000000 84.000000 +v -1.000000 94.000000 84.000000 +v 15.000000 73.000000 102.000000 +v 30.000000 68.000000 102.000000 +v 43.000000 60.000000 102.000000 +v 55.000000 49.000000 102.000000 +v 64.000000 37.000000 102.000000 +v 70.000000 23.000000 102.000000 +v 74.000000 7.000000 102.000000 +v 74.000000 -8.000000 102.000000 +v 70.000000 -24.000000 102.000000 +v 64.000000 -38.000000 102.000000 +v 55.000000 -50.000000 102.000000 +v 43.000000 -61.000000 102.000000 +v 30.000000 -69.000000 102.000000 +v 15.000000 -74.000000 102.000000 +v 0.000000 -75.000000 102.000000 +v -16.000000 -74.000000 102.000000 +v -31.000000 -69.000000 102.000000 +v -44.000000 -61.000000 102.000000 +v -56.000000 -50.000000 102.000000 +v -65.000000 -38.000000 102.000000 +v -71.000000 -24.000000 102.000000 +v -75.000000 -8.000000 102.000000 +v -75.000000 7.000000 102.000000 +v -71.000000 23.000000 102.000000 +v -65.000000 37.000000 102.000000 +v -56.000000 
49.000000 102.000000 +v -44.000000 60.000000 102.000000 +v -31.000000 68.000000 102.000000 +v -16.000000 73.000000 102.000000 +v -1.000000 74.000000 102.000000 +v 10.000000 50.000000 116.000000 +v 21.000000 47.000000 116.000000 +v 30.000000 41.000000 116.000000 +v 38.000000 34.000000 116.000000 +v 44.000000 25.000000 116.000000 +v 49.000000 15.000000 116.000000 +v 51.000000 5.000000 116.000000 +v 51.000000 -6.000000 116.000000 +v 49.000000 -16.000000 116.000000 +v 44.000000 -26.000000 116.000000 +v 38.000000 -35.000000 116.000000 +v 30.000000 -42.000000 116.000000 +v 21.000000 -48.000000 116.000000 +v 10.000000 -51.000000 116.000000 +v 0.000000 -52.000000 116.000000 +v -11.000000 -51.000000 116.000000 +v -22.000000 -48.000000 116.000000 +v -31.000000 -42.000000 116.000000 +v -39.000000 -35.000000 116.000000 +v -45.000000 -26.000000 116.000000 +v -50.000000 -16.000000 116.000000 +v -52.000000 -6.000000 116.000000 +v -52.000000 5.000000 116.000000 +v -50.000000 15.000000 116.000000 +v -45.000000 25.000000 116.000000 +v -39.000000 34.000000 116.000000 +v -31.000000 41.000000 116.000000 +v -22.000000 47.000000 116.000000 +v -11.000000 50.000000 116.000000 +v -1.000000 51.000000 116.000000 +v 5.000000 25.000000 124.000000 +v 10.000000 24.000000 124.000000 +v 15.000000 21.000000 124.000000 +v 19.000000 17.000000 124.000000 +v 22.000000 13.000000 124.000000 +v 25.000000 8.000000 124.000000 +v 26.000000 2.000000 124.000000 +v 26.000000 -3.000000 124.000000 +v 25.000000 -9.000000 124.000000 +v 22.000000 -14.000000 124.000000 +v 19.000000 -18.000000 124.000000 +v 15.000000 -22.000000 124.000000 +v 10.000000 -25.000000 124.000000 +v 5.000000 -26.000000 124.000000 +v 0.000000 -27.000000 124.000000 +v -6.000000 -26.000000 124.000000 +v -11.000000 -25.000000 124.000000 +v -16.000000 -22.000000 124.000000 +v -20.000000 -18.000000 124.000000 +v -23.000000 -14.000000 124.000000 +v -26.000000 -9.000000 124.000000 +v -27.000000 -3.000000 124.000000 +v -27.000000 2.000000 124.000000 +v -26.000000 8.000000 124.000000 +v -23.000000 13.000000 124.000000 +v -20.000000 17.000000 124.000000 +v -16.000000 21.000000 124.000000 +v -11.000000 24.000000 124.000000 +v -6.000000 25.000000 124.000000 +v -1.000000 26.000000 124.000000 +v 0.000000 0.000000 127.000000 +# 422 vertices, 0 vertices normals + +f 1 2 3 +f 1 3 4 +f 1 4 5 +f 1 5 6 +f 1 6 7 +f 1 7 8 +f 1 8 9 +f 1 9 10 +f 1 10 11 +f 1 11 12 +f 1 12 13 +f 1 13 14 +f 1 14 15 +f 1 15 16 +f 1 16 17 +f 1 17 18 +f 1 18 19 +f 1 19 20 +f 1 20 21 +f 1 21 22 +f 1 22 23 +f 1 23 24 +f 1 24 25 +f 1 25 26 +f 1 26 27 +f 1 27 28 +f 1 28 29 +f 1 29 30 +f 1 30 31 +f 1 31 2 +f 2 33 3 +f 33 2 32 +f 3 34 4 +f 34 3 33 +f 4 35 5 +f 35 4 34 +f 5 36 6 +f 36 5 35 +f 6 37 7 +f 37 6 36 +f 7 38 8 +f 38 7 37 +f 8 39 9 +f 39 8 38 +f 9 40 10 +f 40 9 39 +f 10 41 11 +f 41 10 40 +f 11 42 12 +f 42 11 41 +f 12 43 13 +f 43 12 42 +f 13 44 14 +f 44 13 43 +f 14 45 15 +f 45 14 44 +f 15 46 16 +f 46 15 45 +f 16 47 17 +f 47 16 46 +f 17 48 18 +f 48 17 47 +f 18 49 19 +f 49 18 48 +f 19 50 20 +f 50 19 49 +f 20 51 21 +f 51 20 50 +f 21 52 22 +f 52 21 51 +f 22 53 23 +f 53 22 52 +f 23 54 24 +f 54 23 53 +f 24 55 25 +f 55 24 54 +f 25 56 26 +f 56 25 55 +f 26 57 27 +f 57 26 56 +f 27 58 28 +f 58 27 57 +f 28 59 29 +f 59 28 58 +f 29 60 30 +f 60 29 59 +f 30 61 31 +f 61 30 60 +f 31 32 2 +f 32 31 61 +f 32 63 33 +f 63 32 62 +f 33 64 34 +f 64 33 63 +f 34 65 35 +f 65 34 64 +f 35 66 36 +f 66 35 65 +f 36 67 37 +f 67 36 66 +f 37 68 38 +f 68 37 67 +f 38 69 39 +f 69 38 68 +f 39 70 40 +f 70 39 69 +f 40 71 41 +f 71 40 70 +f 41 72 42 +f 72 
41 71 +f 42 73 43 +f 73 42 72 +f 43 74 44 +f 74 43 73 +f 44 75 45 +f 75 44 74 +f 45 76 46 +f 76 45 75 +f 46 77 47 +f 77 46 76 +f 47 78 48 +f 78 47 77 +f 48 79 49 +f 79 48 78 +f 49 80 50 +f 80 49 79 +f 50 81 51 +f 81 50 80 +f 51 82 52 +f 82 51 81 +f 52 83 53 +f 83 52 82 +f 53 84 54 +f 84 53 83 +f 54 85 55 +f 85 54 84 +f 55 86 56 +f 86 55 85 +f 56 87 57 +f 87 56 86 +f 57 88 58 +f 88 57 87 +f 58 89 59 +f 89 58 88 +f 59 90 60 +f 90 59 89 +f 60 91 61 +f 91 60 90 +f 61 62 32 +f 62 61 91 +f 62 93 63 +f 93 62 92 +f 63 94 64 +f 94 63 93 +f 64 95 65 +f 95 64 94 +f 65 96 66 +f 96 65 95 +f 66 97 67 +f 97 66 96 +f 67 98 68 +f 98 67 97 +f 68 99 69 +f 99 68 98 +f 69 100 70 +f 100 69 99 +f 70 101 71 +f 101 70 100 +f 71 102 72 +f 102 71 101 +f 72 103 73 +f 103 72 102 +f 73 104 74 +f 104 73 103 +f 74 105 75 +f 105 74 104 +f 75 106 76 +f 106 75 105 +f 76 107 77 +f 107 76 106 +f 77 108 78 +f 108 77 107 +f 78 109 79 +f 109 78 108 +f 79 110 80 +f 110 79 109 +f 80 111 81 +f 111 80 110 +f 81 112 82 +f 112 81 111 +f 82 113 83 +f 113 82 112 +f 83 114 84 +f 114 83 113 +f 84 115 85 +f 115 84 114 +f 85 116 86 +f 116 85 115 +f 86 117 87 +f 117 86 116 +f 87 118 88 +f 118 87 117 +f 88 119 89 +f 119 88 118 +f 89 120 90 +f 120 89 119 +f 90 121 91 +f 121 90 120 +f 91 92 62 +f 92 91 121 +f 92 123 93 +f 123 92 122 +f 93 124 94 +f 124 93 123 +f 94 125 95 +f 125 94 124 +f 95 126 96 +f 126 95 125 +f 96 127 97 +f 127 96 126 +f 97 128 98 +f 128 97 127 +f 98 129 99 +f 129 98 128 +f 99 130 100 +f 130 99 129 +f 100 131 101 +f 131 100 130 +f 101 132 102 +f 132 101 131 +f 102 133 103 +f 133 102 132 +f 103 134 104 +f 134 103 133 +f 104 135 105 +f 135 104 134 +f 105 136 106 +f 136 105 135 +f 106 137 107 +f 137 106 136 +f 107 138 108 +f 138 107 137 +f 108 139 109 +f 139 108 138 +f 109 140 110 +f 140 109 139 +f 110 141 111 +f 141 110 140 +f 111 142 112 +f 142 111 141 +f 112 143 113 +f 143 112 142 +f 113 144 114 +f 144 113 143 +f 114 145 115 +f 145 114 144 +f 115 146 116 +f 146 115 145 +f 116 147 117 +f 147 116 146 +f 117 148 118 +f 148 117 147 +f 118 149 119 +f 149 118 148 +f 119 150 120 +f 150 119 149 +f 120 151 121 +f 151 120 150 +f 121 122 92 +f 122 121 151 +f 122 153 123 +f 153 122 152 +f 123 154 124 +f 154 123 153 +f 124 155 125 +f 155 124 154 +f 125 156 126 +f 156 125 155 +f 126 157 127 +f 157 126 156 +f 127 158 128 +f 158 127 157 +f 128 159 129 +f 159 128 158 +f 129 160 130 +f 160 129 159 +f 130 161 131 +f 161 130 160 +f 131 162 132 +f 162 131 161 +f 132 163 133 +f 163 132 162 +f 133 164 134 +f 164 133 163 +f 134 165 135 +f 165 134 164 +f 135 166 136 +f 166 135 165 +f 136 167 137 +f 167 136 166 +f 137 168 138 +f 168 137 167 +f 138 169 139 +f 169 138 168 +f 139 170 140 +f 170 139 169 +f 140 171 141 +f 171 140 170 +f 141 172 142 +f 172 141 171 +f 142 173 143 +f 173 142 172 +f 143 174 144 +f 174 143 173 +f 144 175 145 +f 175 144 174 +f 145 176 146 +f 176 145 175 +f 146 177 147 +f 177 146 176 +f 147 178 148 +f 178 147 177 +f 148 179 149 +f 179 148 178 +f 149 180 150 +f 180 149 179 +f 150 181 151 +f 181 150 180 +f 151 152 122 +f 152 151 181 +f 152 183 153 +f 183 152 182 +f 153 184 154 +f 184 153 183 +f 154 185 155 +f 185 154 184 +f 155 186 156 +f 186 155 185 +f 156 187 157 +f 187 156 186 +f 157 188 158 +f 188 157 187 +f 158 189 159 +f 189 158 188 +f 159 190 160 +f 190 159 189 +f 160 191 161 +f 191 160 190 +f 161 192 162 +f 192 161 191 +f 162 193 163 +f 193 162 192 +f 163 194 164 +f 194 163 193 +f 164 195 165 +f 195 164 194 +f 165 196 166 +f 196 165 195 +f 166 197 167 +f 197 166 196 +f 167 198 168 +f 198 167 197 +f 168 199 169 +f 199 168 
198 +f 169 200 170 +f 200 169 199 +f 170 201 171 +f 201 170 200 +f 171 202 172 +f 202 171 201 +f 172 203 173 +f 203 172 202 +f 173 204 174 +f 204 173 203 +f 174 205 175 +f 205 174 204 +f 175 206 176 +f 206 175 205 +f 176 207 177 +f 207 176 206 +f 177 208 178 +f 208 177 207 +f 178 209 179 +f 209 178 208 +f 179 210 180 +f 210 179 209 +f 180 211 181 +f 211 180 210 +f 181 182 152 +f 182 181 211 +f 182 213 183 +f 213 182 212 +f 183 214 184 +f 214 183 213 +f 184 215 185 +f 215 184 214 +f 185 216 186 +f 216 185 215 +f 186 217 187 +f 217 186 216 +f 187 218 188 +f 218 187 217 +f 188 219 189 +f 219 188 218 +f 189 220 190 +f 220 189 219 +f 190 221 191 +f 221 190 220 +f 191 222 192 +f 222 191 221 +f 192 223 193 +f 223 192 222 +f 193 224 194 +f 224 193 223 +f 194 225 195 +f 225 194 224 +f 195 226 196 +f 226 195 225 +f 196 227 197 +f 227 196 226 +f 197 228 198 +f 228 197 227 +f 198 229 199 +f 229 198 228 +f 199 230 200 +f 230 199 229 +f 200 231 201 +f 231 200 230 +f 201 232 202 +f 232 201 231 +f 202 233 203 +f 233 202 232 +f 203 234 204 +f 234 203 233 +f 204 235 205 +f 235 204 234 +f 205 236 206 +f 236 205 235 +f 206 237 207 +f 237 206 236 +f 207 238 208 +f 238 207 237 +f 208 239 209 +f 239 208 238 +f 209 240 210 +f 240 209 239 +f 210 241 211 +f 241 210 240 +f 211 212 182 +f 212 211 241 +f 212 243 213 +f 243 212 242 +f 213 244 214 +f 244 213 243 +f 214 245 215 +f 245 214 244 +f 215 246 216 +f 246 215 245 +f 216 247 217 +f 247 216 246 +f 217 248 218 +f 248 217 247 +f 218 249 219 +f 249 218 248 +f 219 250 220 +f 250 219 249 +f 220 251 221 +f 251 220 250 +f 221 252 222 +f 252 221 251 +f 222 253 223 +f 253 222 252 +f 223 254 224 +f 254 223 253 +f 224 255 225 +f 255 224 254 +f 225 256 226 +f 256 225 255 +f 226 257 227 +f 257 226 256 +f 227 258 228 +f 258 227 257 +f 228 259 229 +f 259 228 258 +f 229 260 230 +f 260 229 259 +f 230 261 231 +f 261 230 260 +f 231 262 232 +f 262 231 261 +f 232 263 233 +f 263 232 262 +f 233 264 234 +f 264 233 263 +f 234 265 235 +f 265 234 264 +f 235 266 236 +f 266 235 265 +f 236 267 237 +f 267 236 266 +f 237 268 238 +f 268 237 267 +f 238 269 239 +f 269 238 268 +f 239 270 240 +f 270 239 269 +f 240 271 241 +f 271 240 270 +f 241 242 212 +f 242 241 271 +f 242 273 243 +f 273 242 272 +f 243 274 244 +f 274 243 273 +f 244 275 245 +f 275 244 274 +f 245 276 246 +f 276 245 275 +f 246 277 247 +f 277 246 276 +f 247 278 248 +f 278 247 277 +f 248 279 249 +f 279 248 278 +f 249 280 250 +f 280 249 279 +f 250 281 251 +f 281 250 280 +f 251 282 252 +f 282 251 281 +f 252 283 253 +f 283 252 282 +f 253 284 254 +f 284 253 283 +f 254 285 255 +f 285 254 284 +f 255 286 256 +f 286 255 285 +f 256 287 257 +f 287 256 286 +f 257 288 258 +f 288 257 287 +f 258 289 259 +f 289 258 288 +f 259 290 260 +f 290 259 289 +f 260 291 261 +f 291 260 290 +f 261 292 262 +f 292 261 291 +f 262 293 263 +f 293 262 292 +f 263 294 264 +f 294 263 293 +f 264 295 265 +f 295 264 294 +f 265 296 266 +f 296 265 295 +f 266 297 267 +f 297 266 296 +f 267 298 268 +f 298 267 297 +f 268 299 269 +f 299 268 298 +f 269 300 270 +f 300 269 299 +f 270 301 271 +f 301 270 300 +f 271 272 242 +f 272 271 301 +f 272 303 273 +f 303 272 302 +f 273 304 274 +f 304 273 303 +f 274 305 275 +f 305 274 304 +f 275 306 276 +f 306 275 305 +f 276 307 277 +f 307 276 306 +f 277 308 278 +f 308 277 307 +f 278 309 279 +f 309 278 308 +f 279 310 280 +f 310 279 309 +f 280 311 281 +f 311 280 310 +f 281 312 282 +f 312 281 311 +f 282 313 283 +f 313 282 312 +f 283 314 284 +f 314 283 313 +f 284 315 285 +f 315 284 314 +f 285 316 286 +f 316 285 315 +f 286 317 287 +f 317 286 316 +f 287 318 
288 +f 318 287 317 +f 288 319 289 +f 319 288 318 +f 289 320 290 +f 320 289 319 +f 290 321 291 +f 321 290 320 +f 291 322 292 +f 322 291 321 +f 292 323 293 +f 323 292 322 +f 293 324 294 +f 324 293 323 +f 294 325 295 +f 325 294 324 +f 295 326 296 +f 326 295 325 +f 296 327 297 +f 327 296 326 +f 297 328 298 +f 328 297 327 +f 298 329 299 +f 329 298 328 +f 299 330 300 +f 330 299 329 +f 300 331 301 +f 331 300 330 +f 301 302 272 +f 302 301 331 +f 302 333 303 +f 333 302 332 +f 303 334 304 +f 334 303 333 +f 304 335 305 +f 335 304 334 +f 305 336 306 +f 336 305 335 +f 306 337 307 +f 337 306 336 +f 307 338 308 +f 338 307 337 +f 308 339 309 +f 339 308 338 +f 309 340 310 +f 340 309 339 +f 310 341 311 +f 341 310 340 +f 311 342 312 +f 342 311 341 +f 312 343 313 +f 343 312 342 +f 313 344 314 +f 344 313 343 +f 314 345 315 +f 345 314 344 +f 315 346 316 +f 346 315 345 +f 316 347 317 +f 347 316 346 +f 317 348 318 +f 348 317 347 +f 318 349 319 +f 349 318 348 +f 319 350 320 +f 350 319 349 +f 320 351 321 +f 351 320 350 +f 321 352 322 +f 352 321 351 +f 322 353 323 +f 353 322 352 +f 323 354 324 +f 354 323 353 +f 324 355 325 +f 355 324 354 +f 325 356 326 +f 356 325 355 +f 326 357 327 +f 357 326 356 +f 327 358 328 +f 358 327 357 +f 328 359 329 +f 359 328 358 +f 329 360 330 +f 360 329 359 +f 330 361 331 +f 361 330 360 +f 331 332 302 +f 332 331 361 +f 332 363 333 +f 363 332 362 +f 333 364 334 +f 364 333 363 +f 334 365 335 +f 365 334 364 +f 335 366 336 +f 366 335 365 +f 336 367 337 +f 367 336 366 +f 337 368 338 +f 368 337 367 +f 338 369 339 +f 369 338 368 +f 339 370 340 +f 370 339 369 +f 340 371 341 +f 371 340 370 +f 341 372 342 +f 372 341 371 +f 342 373 343 +f 373 342 372 +f 343 374 344 +f 374 343 373 +f 344 375 345 +f 375 344 374 +f 345 376 346 +f 376 345 375 +f 346 377 347 +f 377 346 376 +f 347 378 348 +f 378 347 377 +f 348 379 349 +f 379 348 378 +f 349 380 350 +f 380 349 379 +f 350 381 351 +f 381 350 380 +f 351 382 352 +f 382 351 381 +f 352 383 353 +f 383 352 382 +f 353 384 354 +f 384 353 383 +f 354 385 355 +f 385 354 384 +f 355 386 356 +f 386 355 385 +f 356 387 357 +f 387 356 386 +f 357 388 358 +f 388 357 387 +f 358 389 359 +f 389 358 388 +f 359 390 360 +f 390 359 389 +f 360 391 361 +f 391 360 390 +f 361 362 332 +f 362 361 391 +f 362 393 363 +f 393 362 392 +f 363 394 364 +f 394 363 393 +f 364 395 365 +f 395 364 394 +f 365 396 366 +f 396 365 395 +f 366 397 367 +f 397 366 396 +f 367 398 368 +f 398 367 397 +f 368 399 369 +f 399 368 398 +f 369 400 370 +f 400 369 399 +f 370 401 371 +f 401 370 400 +f 371 402 372 +f 402 371 401 +f 372 403 373 +f 403 372 402 +f 373 404 374 +f 404 373 403 +f 374 405 375 +f 405 374 404 +f 375 406 376 +f 406 375 405 +f 376 407 377 +f 407 376 406 +f 377 408 378 +f 408 377 407 +f 378 409 379 +f 409 378 408 +f 379 410 380 +f 410 379 409 +f 380 411 381 +f 411 380 410 +f 381 412 382 +f 412 381 411 +f 382 413 383 +f 413 382 412 +f 383 414 384 +f 414 383 413 +f 384 415 385 +f 415 384 414 +f 385 416 386 +f 416 385 415 +f 386 417 387 +f 417 386 416 +f 387 418 388 +f 418 387 417 +f 388 419 389 +f 419 388 418 +f 389 420 390 +f 420 389 419 +f 390 421 391 +f 421 390 420 +f 391 392 362 +f 392 391 421 +f 392 422 393 +f 393 422 394 +f 394 422 395 +f 395 422 396 +f 396 422 397 +f 397 422 398 +f 398 422 399 +f 399 422 400 +f 400 422 401 +f 401 422 402 +f 402 422 403 +f 403 422 404 +f 404 422 405 +f 405 422 406 +f 406 422 407 +f 407 422 408 +f 408 422 409 +f 409 422 410 +f 410 422 411 +f 411 422 412 +f 412 422 413 +f 413 422 414 +f 414 422 415 +f 415 422 416 +f 416 422 417 +f 417 422 418 +f 418 422 419 +f 419 422 
420 +f 420 422 421 +f 421 422 392 +# 840 faces, 0 coords texture + +# End of File \ No newline at end of file diff --git a/mesh-master/data/unittest/sphere.ply b/mesh-master/data/unittest/sphere.ply new file mode 100644 index 0000000000000000000000000000000000000000..6593ad2c215cb2de3b5e26cf9f01cffac1e7a5f8 --- /dev/null +++ b/mesh-master/data/unittest/sphere.ply @@ -0,0 +1,1271 @@ +ply +format ascii 1.0 +element vertex 422 +property float x +property float y +property float z +element face 840 +property list uchar int vertex_indices +end_header +0 0 -127 +5 25 -125 +10 24 -125 +15 21 -125 +19 17 -125 +22 13 -125 +25 8 -125 +26 2 -125 +26 -3 -125 +25 -9 -125 +22 -14 -125 +19 -18 -125 +15 -22 -125 +10 -25 -125 +5 -26 -125 +0 -27 -125 +-6 -26 -125 +-11 -25 -125 +-16 -22 -125 +-20 -18 -125 +-23 -14 -125 +-26 -9 -125 +-27 -3 -125 +-27 2 -125 +-26 8 -125 +-23 13 -125 +-20 17 -125 +-16 21 -125 +-11 24 -125 +-6 25 -125 +-1 26 -125 +10 50 -117 +21 47 -117 +30 41 -117 +38 34 -117 +44 25 -117 +49 15 -117 +51 5 -117 +51 -6 -117 +49 -16 -117 +44 -26 -117 +38 -35 -117 +30 -42 -117 +21 -48 -117 +10 -51 -117 +0 -52 -117 +-11 -51 -117 +-22 -48 -117 +-31 -42 -117 +-39 -35 -117 +-45 -26 -117 +-50 -16 -117 +-52 -6 -117 +-52 5 -117 +-50 15 -117 +-45 25 -117 +-39 34 -117 +-31 41 -117 +-22 47 -117 +-11 50 -117 +-1 51 -117 +15 73 -103 +30 68 -103 +43 60 -103 +55 49 -103 +64 37 -103 +70 23 -103 +74 7 -103 +74 -8 -103 +70 -24 -103 +64 -38 -103 +55 -50 -103 +43 -61 -103 +30 -69 -103 +15 -74 -103 +0 -75 -103 +-16 -74 -103 +-31 -69 -103 +-44 -61 -103 +-56 -50 -103 +-65 -38 -103 +-71 -24 -103 +-75 -8 -103 +-75 7 -103 +-71 23 -103 +-65 37 -103 +-56 49 -103 +-44 60 -103 +-31 68 -103 +-16 73 -103 +-1 74 -103 +19 92 -85 +38 86 -85 +55 76 -85 +70 63 -85 +81 47 -85 +89 29 -85 +93 9 -85 +93 -10 -85 +89 -30 -85 +81 -48 -85 +70 -64 -85 +55 -77 -85 +38 -87 -85 +19 -93 -85 +0 -95 -85 +-20 -93 -85 +-39 -87 -85 +-56 -77 -85 +-71 -64 -85 +-82 -48 -85 +-90 -30 -85 +-94 -10 -85 +-94 9 -85 +-90 29 -85 +-82 47 -85 +-71 63 -85 +-56 76 -85 +-39 86 -85 +-20 92 -85 +-1 94 -85 +22 107 -64 +44 100 -64 +64 88 -64 +81 73 -64 +95 54 -64 +104 33 -64 +109 11 -64 +109 -12 -64 +104 -34 -64 +95 -55 -64 +81 -74 -64 +64 -89 -64 +44 -101 -64 +22 -108 -64 +0 -110 -64 +-23 -108 -64 +-45 -101 -64 +-65 -89 -64 +-82 -74 -64 +-96 -55 -64 +-105 -34 -64 +-110 -12 -64 +-110 11 -64 +-105 33 -64 +-96 54 -64 +-82 73 -64 +-65 88 -64 +-45 100 -64 +-23 107 -64 +-1 109 -64 +25 118 -40 +49 110 -40 +70 97 -40 +89 80 -40 +104 60 -40 +114 37 -40 +120 12 -40 +120 -13 -40 +114 -38 -40 +104 -61 -40 +89 -81 -40 +70 -98 -40 +49 -111 -40 +25 -119 -40 +0 -121 -40 +-26 -119 -40 +-50 -111 -40 +-71 -98 -40 +-90 -81 -40 +-105 -61 -40 +-115 -38 -40 +-121 -13 -40 +-121 12 -40 +-115 37 -40 +-105 60 -40 +-90 80 -40 +-71 97 -40 +-50 110 -40 +-26 118 -40 +-1 120 -40 +26 123 -14 +51 115 -14 +74 102 -14 +93 84 -14 +109 63 -14 +120 39 -14 +125 13 -14 +125 -14 -14 +120 -40 -14 +109 -64 -14 +93 -85 -14 +74 -103 -14 +51 -116 -14 +26 -124 -14 +0 -127 -14 +-27 -124 -14 +-52 -116 -14 +-75 -103 -14 +-94 -85 -14 +-110 -64 -14 +-121 -40 -14 +-126 -14 -14 +-126 13 -14 +-121 39 -14 +-110 63 -14 +-94 84 -14 +-75 102 -14 +-52 115 -14 +-27 123 -14 +-1 126 -14 +26 123 13 +51 115 13 +74 102 13 +93 84 13 +109 63 13 +120 39 13 +125 13 13 +125 -14 13 +120 -40 13 +109 -64 13 +93 -85 13 +74 -103 13 +51 -116 13 +26 -124 13 +0 -127 13 +-27 -124 13 +-52 -116 13 +-75 -103 13 +-94 -85 13 +-110 -64 13 +-121 -40 13 +-126 -14 13 +-126 13 13 +-121 39 13 +-110 63 13 +-94 84 13 +-75 102 13 +-52 115 13 +-27 123 13 +-1 126 
13 +25 118 39 +49 110 39 +70 97 39 +89 80 39 +104 60 39 +114 37 39 +120 12 39 +120 -13 39 +114 -38 39 +104 -61 39 +89 -81 39 +70 -98 39 +49 -111 39 +25 -119 39 +0 -121 39 +-26 -119 39 +-50 -111 39 +-71 -98 39 +-90 -81 39 +-105 -61 39 +-115 -38 39 +-121 -13 39 +-121 12 39 +-115 37 39 +-105 60 39 +-90 80 39 +-71 97 39 +-50 110 39 +-26 118 39 +-1 120 39 +22 107 63 +44 100 63 +64 88 63 +81 73 63 +95 54 63 +104 33 63 +109 11 63 +109 -12 63 +104 -34 63 +95 -55 63 +81 -74 63 +64 -89 63 +44 -101 63 +22 -108 63 +0 -110 63 +-23 -108 63 +-45 -101 63 +-65 -89 63 +-82 -74 63 +-96 -55 63 +-105 -34 63 +-110 -12 63 +-110 11 63 +-105 33 63 +-96 54 63 +-82 73 63 +-65 88 63 +-45 100 63 +-23 107 63 +-1 109 63 +19 92 84 +38 86 84 +55 76 84 +70 63 84 +81 47 84 +89 29 84 +93 9 84 +93 -10 84 +89 -30 84 +81 -48 84 +70 -64 84 +55 -77 84 +38 -87 84 +19 -93 84 +0 -95 84 +-20 -93 84 +-39 -87 84 +-56 -77 84 +-71 -64 84 +-82 -48 84 +-90 -30 84 +-94 -10 84 +-94 9 84 +-90 29 84 +-82 47 84 +-71 63 84 +-56 76 84 +-39 86 84 +-20 92 84 +-1 94 84 +15 73 102 +30 68 102 +43 60 102 +55 49 102 +64 37 102 +70 23 102 +74 7 102 +74 -8 102 +70 -24 102 +64 -38 102 +55 -50 102 +43 -61 102 +30 -69 102 +15 -74 102 +0 -75 102 +-16 -74 102 +-31 -69 102 +-44 -61 102 +-56 -50 102 +-65 -38 102 +-71 -24 102 +-75 -8 102 +-75 7 102 +-71 23 102 +-65 37 102 +-56 49 102 +-44 60 102 +-31 68 102 +-16 73 102 +-1 74 102 +10 50 116 +21 47 116 +30 41 116 +38 34 116 +44 25 116 +49 15 116 +51 5 116 +51 -6 116 +49 -16 116 +44 -26 116 +38 -35 116 +30 -42 116 +21 -48 116 +10 -51 116 +0 -52 116 +-11 -51 116 +-22 -48 116 +-31 -42 116 +-39 -35 116 +-45 -26 116 +-50 -16 116 +-52 -6 116 +-52 5 116 +-50 15 116 +-45 25 116 +-39 34 116 +-31 41 116 +-22 47 116 +-11 50 116 +-1 51 116 +5 25 124 +10 24 124 +15 21 124 +19 17 124 +22 13 124 +25 8 124 +26 2 124 +26 -3 124 +25 -9 124 +22 -14 124 +19 -18 124 +15 -22 124 +10 -25 124 +5 -26 124 +0 -27 124 +-6 -26 124 +-11 -25 124 +-16 -22 124 +-20 -18 124 +-23 -14 124 +-26 -9 124 +-27 -3 124 +-27 2 124 +-26 8 124 +-23 13 124 +-20 17 124 +-16 21 124 +-11 24 124 +-6 25 124 +-1 26 124 +0 0 127 +3 0 1 2 +3 0 2 3 +3 0 3 4 +3 0 4 5 +3 0 5 6 +3 0 6 7 +3 0 7 8 +3 0 8 9 +3 0 9 10 +3 0 10 11 +3 0 11 12 +3 0 12 13 +3 0 13 14 +3 0 14 15 +3 0 15 16 +3 0 16 17 +3 0 17 18 +3 0 18 19 +3 0 19 20 +3 0 20 21 +3 0 21 22 +3 0 22 23 +3 0 23 24 +3 0 24 25 +3 0 25 26 +3 0 26 27 +3 0 27 28 +3 0 28 29 +3 0 29 30 +3 0 30 1 +3 1 32 2 +3 32 1 31 +3 2 33 3 +3 33 2 32 +3 3 34 4 +3 34 3 33 +3 4 35 5 +3 35 4 34 +3 5 36 6 +3 36 5 35 +3 6 37 7 +3 37 6 36 +3 7 38 8 +3 38 7 37 +3 8 39 9 +3 39 8 38 +3 9 40 10 +3 40 9 39 +3 10 41 11 +3 41 10 40 +3 11 42 12 +3 42 11 41 +3 12 43 13 +3 43 12 42 +3 13 44 14 +3 44 13 43 +3 14 45 15 +3 45 14 44 +3 15 46 16 +3 46 15 45 +3 16 47 17 +3 47 16 46 +3 17 48 18 +3 48 17 47 +3 18 49 19 +3 49 18 48 +3 19 50 20 +3 50 19 49 +3 20 51 21 +3 51 20 50 +3 21 52 22 +3 52 21 51 +3 22 53 23 +3 53 22 52 +3 23 54 24 +3 54 23 53 +3 24 55 25 +3 55 24 54 +3 25 56 26 +3 56 25 55 +3 26 57 27 +3 57 26 56 +3 27 58 28 +3 58 27 57 +3 28 59 29 +3 59 28 58 +3 29 60 30 +3 60 29 59 +3 30 31 1 +3 31 30 60 +3 31 62 32 +3 62 31 61 +3 32 63 33 +3 63 32 62 +3 33 64 34 +3 64 33 63 +3 34 65 35 +3 65 34 64 +3 35 66 36 +3 66 35 65 +3 36 67 37 +3 67 36 66 +3 37 68 38 +3 68 37 67 +3 38 69 39 +3 69 38 68 +3 39 70 40 +3 70 39 69 +3 40 71 41 +3 71 40 70 +3 41 72 42 +3 72 41 71 +3 42 73 43 +3 73 42 72 +3 43 74 44 +3 74 43 73 +3 44 75 45 +3 75 44 74 +3 45 76 46 +3 76 45 75 +3 46 77 47 +3 77 46 76 +3 47 78 48 +3 78 47 77 +3 48 79 49 +3 79 48 78 +3 49 80 50 +3 80 49 79 +3 50 
81 51 +3 81 50 80 +3 51 82 52 +3 82 51 81 +3 52 83 53 +3 83 52 82 +3 53 84 54 +3 84 53 83 +3 54 85 55 +3 85 54 84 +3 55 86 56 +3 86 55 85 +3 56 87 57 +3 87 56 86 +3 57 88 58 +3 88 57 87 +3 58 89 59 +3 89 58 88 +3 59 90 60 +3 90 59 89 +3 60 61 31 +3 61 60 90 +3 61 92 62 +3 92 61 91 +3 62 93 63 +3 93 62 92 +3 63 94 64 +3 94 63 93 +3 64 95 65 +3 95 64 94 +3 65 96 66 +3 96 65 95 +3 66 97 67 +3 97 66 96 +3 67 98 68 +3 98 67 97 +3 68 99 69 +3 99 68 98 +3 69 100 70 +3 100 69 99 +3 70 101 71 +3 101 70 100 +3 71 102 72 +3 102 71 101 +3 72 103 73 +3 103 72 102 +3 73 104 74 +3 104 73 103 +3 74 105 75 +3 105 74 104 +3 75 106 76 +3 106 75 105 +3 76 107 77 +3 107 76 106 +3 77 108 78 +3 108 77 107 +3 78 109 79 +3 109 78 108 +3 79 110 80 +3 110 79 109 +3 80 111 81 +3 111 80 110 +3 81 112 82 +3 112 81 111 +3 82 113 83 +3 113 82 112 +3 83 114 84 +3 114 83 113 +3 84 115 85 +3 115 84 114 +3 85 116 86 +3 116 85 115 +3 86 117 87 +3 117 86 116 +3 87 118 88 +3 118 87 117 +3 88 119 89 +3 119 88 118 +3 89 120 90 +3 120 89 119 +3 90 91 61 +3 91 90 120 +3 91 122 92 +3 122 91 121 +3 92 123 93 +3 123 92 122 +3 93 124 94 +3 124 93 123 +3 94 125 95 +3 125 94 124 +3 95 126 96 +3 126 95 125 +3 96 127 97 +3 127 96 126 +3 97 128 98 +3 128 97 127 +3 98 129 99 +3 129 98 128 +3 99 130 100 +3 130 99 129 +3 100 131 101 +3 131 100 130 +3 101 132 102 +3 132 101 131 +3 102 133 103 +3 133 102 132 +3 103 134 104 +3 134 103 133 +3 104 135 105 +3 135 104 134 +3 105 136 106 +3 136 105 135 +3 106 137 107 +3 137 106 136 +3 107 138 108 +3 138 107 137 +3 108 139 109 +3 139 108 138 +3 109 140 110 +3 140 109 139 +3 110 141 111 +3 141 110 140 +3 111 142 112 +3 142 111 141 +3 112 143 113 +3 143 112 142 +3 113 144 114 +3 144 113 143 +3 114 145 115 +3 145 114 144 +3 115 146 116 +3 146 115 145 +3 116 147 117 +3 147 116 146 +3 117 148 118 +3 148 117 147 +3 118 149 119 +3 149 118 148 +3 119 150 120 +3 150 119 149 +3 120 121 91 +3 121 120 150 +3 121 152 122 +3 152 121 151 +3 122 153 123 +3 153 122 152 +3 123 154 124 +3 154 123 153 +3 124 155 125 +3 155 124 154 +3 125 156 126 +3 156 125 155 +3 126 157 127 +3 157 126 156 +3 127 158 128 +3 158 127 157 +3 128 159 129 +3 159 128 158 +3 129 160 130 +3 160 129 159 +3 130 161 131 +3 161 130 160 +3 131 162 132 +3 162 131 161 +3 132 163 133 +3 163 132 162 +3 133 164 134 +3 164 133 163 +3 134 165 135 +3 165 134 164 +3 135 166 136 +3 166 135 165 +3 136 167 137 +3 167 136 166 +3 137 168 138 +3 168 137 167 +3 138 169 139 +3 169 138 168 +3 139 170 140 +3 170 139 169 +3 140 171 141 +3 171 140 170 +3 141 172 142 +3 172 141 171 +3 142 173 143 +3 173 142 172 +3 143 174 144 +3 174 143 173 +3 144 175 145 +3 175 144 174 +3 145 176 146 +3 176 145 175 +3 146 177 147 +3 177 146 176 +3 147 178 148 +3 178 147 177 +3 148 179 149 +3 179 148 178 +3 149 180 150 +3 180 149 179 +3 150 151 121 +3 151 150 180 +3 151 182 152 +3 182 151 181 +3 152 183 153 +3 183 152 182 +3 153 184 154 +3 184 153 183 +3 154 185 155 +3 185 154 184 +3 155 186 156 +3 186 155 185 +3 156 187 157 +3 187 156 186 +3 157 188 158 +3 188 157 187 +3 158 189 159 +3 189 158 188 +3 159 190 160 +3 190 159 189 +3 160 191 161 +3 191 160 190 +3 161 192 162 +3 192 161 191 +3 162 193 163 +3 193 162 192 +3 163 194 164 +3 194 163 193 +3 164 195 165 +3 195 164 194 +3 165 196 166 +3 196 165 195 +3 166 197 167 +3 197 166 196 +3 167 198 168 +3 198 167 197 +3 168 199 169 +3 199 168 198 +3 169 200 170 +3 200 169 199 +3 170 201 171 +3 201 170 200 +3 171 202 172 +3 202 171 201 +3 172 203 173 +3 203 172 202 +3 173 204 174 +3 204 173 203 +3 174 205 175 +3 205 174 204 +3 175 206 176 +3 
206 175 205 +3 176 207 177 +3 207 176 206 +3 177 208 178 +3 208 177 207 +3 178 209 179 +3 209 178 208 +3 179 210 180 +3 210 179 209 +3 180 181 151 +3 181 180 210 +3 181 212 182 +3 212 181 211 +3 182 213 183 +3 213 182 212 +3 183 214 184 +3 214 183 213 +3 184 215 185 +3 215 184 214 +3 185 216 186 +3 216 185 215 +3 186 217 187 +3 217 186 216 +3 187 218 188 +3 218 187 217 +3 188 219 189 +3 219 188 218 +3 189 220 190 +3 220 189 219 +3 190 221 191 +3 221 190 220 +3 191 222 192 +3 222 191 221 +3 192 223 193 +3 223 192 222 +3 193 224 194 +3 224 193 223 +3 194 225 195 +3 225 194 224 +3 195 226 196 +3 226 195 225 +3 196 227 197 +3 227 196 226 +3 197 228 198 +3 228 197 227 +3 198 229 199 +3 229 198 228 +3 199 230 200 +3 230 199 229 +3 200 231 201 +3 231 200 230 +3 201 232 202 +3 232 201 231 +3 202 233 203 +3 233 202 232 +3 203 234 204 +3 234 203 233 +3 204 235 205 +3 235 204 234 +3 205 236 206 +3 236 205 235 +3 206 237 207 +3 237 206 236 +3 207 238 208 +3 238 207 237 +3 208 239 209 +3 239 208 238 +3 209 240 210 +3 240 209 239 +3 210 211 181 +3 211 210 240 +3 211 242 212 +3 242 211 241 +3 212 243 213 +3 243 212 242 +3 213 244 214 +3 244 213 243 +3 214 245 215 +3 245 214 244 +3 215 246 216 +3 246 215 245 +3 216 247 217 +3 247 216 246 +3 217 248 218 +3 248 217 247 +3 218 249 219 +3 249 218 248 +3 219 250 220 +3 250 219 249 +3 220 251 221 +3 251 220 250 +3 221 252 222 +3 252 221 251 +3 222 253 223 +3 253 222 252 +3 223 254 224 +3 254 223 253 +3 224 255 225 +3 255 224 254 +3 225 256 226 +3 256 225 255 +3 226 257 227 +3 257 226 256 +3 227 258 228 +3 258 227 257 +3 228 259 229 +3 259 228 258 +3 229 260 230 +3 260 229 259 +3 230 261 231 +3 261 230 260 +3 231 262 232 +3 262 231 261 +3 232 263 233 +3 263 232 262 +3 233 264 234 +3 264 233 263 +3 234 265 235 +3 265 234 264 +3 235 266 236 +3 266 235 265 +3 236 267 237 +3 267 236 266 +3 237 268 238 +3 268 237 267 +3 238 269 239 +3 269 238 268 +3 239 270 240 +3 270 239 269 +3 240 241 211 +3 241 240 270 +3 241 272 242 +3 272 241 271 +3 242 273 243 +3 273 242 272 +3 243 274 244 +3 274 243 273 +3 244 275 245 +3 275 244 274 +3 245 276 246 +3 276 245 275 +3 246 277 247 +3 277 246 276 +3 247 278 248 +3 278 247 277 +3 248 279 249 +3 279 248 278 +3 249 280 250 +3 280 249 279 +3 250 281 251 +3 281 250 280 +3 251 282 252 +3 282 251 281 +3 252 283 253 +3 283 252 282 +3 253 284 254 +3 284 253 283 +3 254 285 255 +3 285 254 284 +3 255 286 256 +3 286 255 285 +3 256 287 257 +3 287 256 286 +3 257 288 258 +3 288 257 287 +3 258 289 259 +3 289 258 288 +3 259 290 260 +3 290 259 289 +3 260 291 261 +3 291 260 290 +3 261 292 262 +3 292 261 291 +3 262 293 263 +3 293 262 292 +3 263 294 264 +3 294 263 293 +3 264 295 265 +3 295 264 294 +3 265 296 266 +3 296 265 295 +3 266 297 267 +3 297 266 296 +3 267 298 268 +3 298 267 297 +3 268 299 269 +3 299 268 298 +3 269 300 270 +3 300 269 299 +3 270 271 241 +3 271 270 300 +3 271 302 272 +3 302 271 301 +3 272 303 273 +3 303 272 302 +3 273 304 274 +3 304 273 303 +3 274 305 275 +3 305 274 304 +3 275 306 276 +3 306 275 305 +3 276 307 277 +3 307 276 306 +3 277 308 278 +3 308 277 307 +3 278 309 279 +3 309 278 308 +3 279 310 280 +3 310 279 309 +3 280 311 281 +3 311 280 310 +3 281 312 282 +3 312 281 311 +3 282 313 283 +3 313 282 312 +3 283 314 284 +3 314 283 313 +3 284 315 285 +3 315 284 314 +3 285 316 286 +3 316 285 315 +3 286 317 287 +3 317 286 316 +3 287 318 288 +3 318 287 317 +3 288 319 289 +3 319 288 318 +3 289 320 290 +3 320 289 319 +3 290 321 291 +3 321 290 320 +3 291 322 292 +3 322 291 321 +3 292 323 293 +3 323 292 322 +3 293 324 294 +3 324 293 323 +3 
294 325 295 +3 325 294 324 +3 295 326 296 +3 326 295 325 +3 296 327 297 +3 327 296 326 +3 297 328 298 +3 328 297 327 +3 298 329 299 +3 329 298 328 +3 299 330 300 +3 330 299 329 +3 300 301 271 +3 301 300 330 +3 301 332 302 +3 332 301 331 +3 302 333 303 +3 333 302 332 +3 303 334 304 +3 334 303 333 +3 304 335 305 +3 335 304 334 +3 305 336 306 +3 336 305 335 +3 306 337 307 +3 337 306 336 +3 307 338 308 +3 338 307 337 +3 308 339 309 +3 339 308 338 +3 309 340 310 +3 340 309 339 +3 310 341 311 +3 341 310 340 +3 311 342 312 +3 342 311 341 +3 312 343 313 +3 343 312 342 +3 313 344 314 +3 344 313 343 +3 314 345 315 +3 345 314 344 +3 315 346 316 +3 346 315 345 +3 316 347 317 +3 347 316 346 +3 317 348 318 +3 348 317 347 +3 318 349 319 +3 349 318 348 +3 319 350 320 +3 350 319 349 +3 320 351 321 +3 351 320 350 +3 321 352 322 +3 352 321 351 +3 322 353 323 +3 353 322 352 +3 323 354 324 +3 354 323 353 +3 324 355 325 +3 355 324 354 +3 325 356 326 +3 356 325 355 +3 326 357 327 +3 357 326 356 +3 327 358 328 +3 358 327 357 +3 328 359 329 +3 359 328 358 +3 329 360 330 +3 360 329 359 +3 330 331 301 +3 331 330 360 +3 331 362 332 +3 362 331 361 +3 332 363 333 +3 363 332 362 +3 333 364 334 +3 364 333 363 +3 334 365 335 +3 365 334 364 +3 335 366 336 +3 366 335 365 +3 336 367 337 +3 367 336 366 +3 337 368 338 +3 368 337 367 +3 338 369 339 +3 369 338 368 +3 339 370 340 +3 370 339 369 +3 340 371 341 +3 371 340 370 +3 341 372 342 +3 372 341 371 +3 342 373 343 +3 373 342 372 +3 343 374 344 +3 374 343 373 +3 344 375 345 +3 375 344 374 +3 345 376 346 +3 376 345 375 +3 346 377 347 +3 377 346 376 +3 347 378 348 +3 378 347 377 +3 348 379 349 +3 379 348 378 +3 349 380 350 +3 380 349 379 +3 350 381 351 +3 381 350 380 +3 351 382 352 +3 382 351 381 +3 352 383 353 +3 383 352 382 +3 353 384 354 +3 384 353 383 +3 354 385 355 +3 385 354 384 +3 355 386 356 +3 386 355 385 +3 356 387 357 +3 387 356 386 +3 357 388 358 +3 388 357 387 +3 358 389 359 +3 389 358 388 +3 359 390 360 +3 390 359 389 +3 360 361 331 +3 361 360 390 +3 361 392 362 +3 392 361 391 +3 362 393 363 +3 393 362 392 +3 363 394 364 +3 394 363 393 +3 364 395 365 +3 395 364 394 +3 365 396 366 +3 396 365 395 +3 366 397 367 +3 397 366 396 +3 367 398 368 +3 398 367 397 +3 368 399 369 +3 399 368 398 +3 369 400 370 +3 400 369 399 +3 370 401 371 +3 401 370 400 +3 371 402 372 +3 402 371 401 +3 372 403 373 +3 403 372 402 +3 373 404 374 +3 404 373 403 +3 374 405 375 +3 405 374 404 +3 375 406 376 +3 406 375 405 +3 376 407 377 +3 407 376 406 +3 377 408 378 +3 408 377 407 +3 378 409 379 +3 409 378 408 +3 379 410 380 +3 410 379 409 +3 380 411 381 +3 411 380 410 +3 381 412 382 +3 412 381 411 +3 382 413 383 +3 413 382 412 +3 383 414 384 +3 414 383 413 +3 384 415 385 +3 415 384 414 +3 385 416 386 +3 416 385 415 +3 386 417 387 +3 417 386 416 +3 387 418 388 +3 418 387 417 +3 388 419 389 +3 419 388 418 +3 389 420 390 +3 420 389 419 +3 390 391 361 +3 391 390 420 +3 391 421 392 +3 392 421 393 +3 393 421 394 +3 394 421 395 +3 395 421 396 +3 396 421 397 +3 397 421 398 +3 398 421 399 +3 399 421 400 +3 400 421 401 +3 401 421 402 +3 402 421 403 +3 403 421 404 +3 404 421 405 +3 405 421 406 +3 406 421 407 +3 407 421 408 +3 408 421 409 +3 409 421 410 +3 410 421 411 +3 411 421 412 +3 412 421 413 +3 413 421 414 +3 414 421 415 +3 415 421 416 +3 416 421 417 +3 417 421 418 +3 418 421 419 +3 419 421 420 +3 420 421 391 diff --git a/mesh-master/data/unittest/test_box.obj b/mesh-master/data/unittest/test_box.obj new file mode 100644 index 
0000000000000000000000000000000000000000..87a428da50178ce1ef334013f576bcf6e3add4ce --- /dev/null +++ b/mesh-master/data/unittest/test_box.obj @@ -0,0 +1,50 @@ +#### +# +# OBJ File Generated by Meshlab +# +#### +# Object test_box.obj +# +# Vertices: 8 +# Faces: 12 +# +#### +vn 0.577350 0.577350 0.577350 +#landmark pospospos +v 0.500000 0.500000 0.500000 +vn -0.333333 0.666667 0.666667 +v -0.500000 0.500000 0.500000 +vn 0.666667 -0.333333 0.666667 +v 0.500000 -0.500000 0.500000 +vn -0.666667 -0.666667 0.333333 +v -0.500000 -0.500000 0.500000 +vn 0.666667 0.666667 -0.333333 +v 0.500000 0.500000 -0.500000 +vn -0.666667 0.333333 -0.666667 +v -0.500000 0.500000 -0.500000 +vn 0.333333 -0.666667 -0.666667 +v 0.500000 -0.500000 -0.500000 +vn -0.577350 -0.577350 -0.577350 +#landmark negnegneg +v -0.500000 -0.500000 -0.500000 +# 8 vertices, 0 vertices normals + +g a +f 1//1 2//2 3//3 +f 4//4 3//3 2//2 +f 1//1 3//3 5//5 +f 7//7 5//5 3//3 +f 1//1 5//5 2//2 +f 6//6 2//2 5//5 +g b +f 8//8 6//6 7//7 +g c +f 5//5 7//7 6//6 +f 8//8 7//7 4//4 +f 3//3 4//4 7//7 +g b +f 8//8 4//4 6//6 +f 2//2 6//6 4//4 +# 12 faces, 0 coords texture + +# End of File diff --git a/mesh-master/data/unittest/test_box.ply b/mesh-master/data/unittest/test_box.ply new file mode 100644 index 0000000000000000000000000000000000000000..b4c4882afb894e0073a5b7585a678f88d4764ea5 --- /dev/null +++ b/mesh-master/data/unittest/test_box.ply @@ -0,0 +1,29 @@ +ply +format ascii 1.0 +element vertex 8 +property float x +property float y +property float z +element face 12 +property list uchar int vertex_indices +end_header +0.5 0.5 0.5 +-0.5 0.5 0.5 +0.5 -0.5 0.5 +-0.5 -0.5 0.5 +0.5 0.5 -0.5 +-0.5 0.5 -0.5 +0.5 -0.5 -0.5 +-0.5 -0.5 -0.5 +3 0 1 2 +3 3 2 1 +3 0 2 4 +3 6 4 2 +3 0 4 1 +3 5 1 4 +3 7 5 6 +3 4 6 5 +3 7 6 3 +3 2 3 6 +3 7 3 5 +3 1 5 3 diff --git a/mesh-master/data/unittest/test_box.pp b/mesh-master/data/unittest/test_box.pp new file mode 100644 index 0000000000000000000000000000000000000000..eb5b0b1afd83a00ec40c1a6a35b3239e205d64d9 --- /dev/null +++ b/mesh-master/data/unittest/test_box.pp @@ -0,0 +1,11 @@ + + + + + + + + + + + \ No newline at end of file diff --git a/mesh-master/data/unittest/test_box_le.ply b/mesh-master/data/unittest/test_box_le.ply new file mode 100644 index 0000000000000000000000000000000000000000..c1e3ac977dccae269f1f3b83fb5806a249f437cb Binary files /dev/null and b/mesh-master/data/unittest/test_box_le.ply differ diff --git a/mesh-master/data/unittest/test_doublebox.obj b/mesh-master/data/unittest/test_doublebox.obj new file mode 100644 index 0000000000000000000000000000000000000000..e6b79414d844593e87cc68212a5103f0a5fd351f --- /dev/null +++ b/mesh-master/data/unittest/test_doublebox.obj @@ -0,0 +1,64 @@ +#### +# +# OBJ File +# +#### +# Object test_doublebox.obj +# +# Vertices: 12 +# Faces: 20 +# +#### +vn 0.577350 0.577350 0.577350 +v 0.500000 0.500000 0.500000 +vn -0.333333 0.666667 0.666667 +v -0.500000 0.500000 0.500000 +vn 0.666667 -0.333333 0.666667 +v 0.500000 -0.500000 0.500000 +vn -0.666667 -0.666667 0.333333 +v -0.500000 -0.500000 0.500000 +vn 0.666667 0.666667 -0.333333 +v 0.500000 0.500000 -0.500000 +vn -0.666667 0.333333 -0.666667 +v -0.500000 0.500000 -0.500000 +vn 0.333333 -0.666667 -0.666667 +v 0.500000 -0.500000 -0.500000 +vn -0.577350 -0.577350 -0.577350 +v -0.500000 -0.500000 -0.500000 + +vn 0.577350 0.577350 0.577350 +v 0.500000 0.500000 1.500000 +vn -0.333333 0.666667 0.666667 +v -0.500000 0.500000 1.500000 +vn 0.666667 -0.333333 0.666667 +v 0.500000 -0.500000 1.500000 +vn -0.666667 -0.666667 
0.333333 +v -0.500000 -0.500000 1.500000 +# 12 vertices, 0 vertices normals + +g box1 +f 1//1 3//3 5//5 +f 7//7 5//5 3//3 +f 1//1 5//5 2//2 +f 6//6 2//2 5//5 +f 8//8 6//6 7//7 +f 5//5 7//7 6//6 +f 8//8 7//7 4//4 +f 3//3 4//4 7//7 +f 8//8 4//4 6//6 +f 2//2 6//6 4//4 +g box2 +f 9//9 10//10 11//11 +f 12//12 11//11 10//10 +f 9//9 11//11 1//1 +f 3//3 1//1 11//11 +f 9//9 1//1 10//10 +f 2//2 10//10 1//1 +f 4//4 3//3 12//12 +f 11//11 12//12 3//3 +f 4//4 12//12 2//2 +f 10//10 2//2 12//12 + +# 20 faces, 0 coords texture + +# End of File diff --git a/mesh-master/doc/Makefile b/mesh-master/doc/Makefile new file mode 100644 index 0000000000000000000000000000000000000000..2493d8d66e9e2aa89cfc69ce3258a24337b9b0bd --- /dev/null +++ b/mesh-master/doc/Makefile @@ -0,0 +1,177 @@ +# Makefile for Sphinx documentation +# + +# You can set these variables from the command line. +SPHINXOPTS = +SPHINXBUILD = sphinx-build +PAPER = +BUILDDIR = build + +# User-friendly check for sphinx-build +ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) +$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) +endif + +# Internal variables. +PAPEROPT_a4 = -D latex_paper_size=a4 +PAPEROPT_letter = -D latex_paper_size=letter +ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source +# the i18n builder cannot share the environment and doctrees with the others +I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source + +.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext + +help: + @echo "Please use \`make ' where is one of" + @echo " html to make standalone HTML files" + @echo " dirhtml to make HTML files named index.html in directories" + @echo " singlehtml to make a single large HTML file" + @echo " pickle to make pickle files" + @echo " json to make JSON files" + @echo " htmlhelp to make HTML files and a HTML help project" + @echo " qthelp to make HTML files and a qthelp project" + @echo " devhelp to make HTML files and a Devhelp project" + @echo " epub to make an epub" + @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" + @echo " latexpdf to make LaTeX files and run them through pdflatex" + @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" + @echo " text to make text files" + @echo " man to make manual pages" + @echo " texinfo to make Texinfo files" + @echo " info to make Texinfo files and run them through makeinfo" + @echo " gettext to make PO message catalogs" + @echo " changes to make an overview of all changed/added/deprecated items" + @echo " xml to make Docutils-native XML files" + @echo " pseudoxml to make pseudoxml-XML files for display purposes" + @echo " linkcheck to check all external links for integrity" + @echo " doctest to run all doctests embedded in the documentation (if enabled)" + +clean: + rm -rf $(BUILDDIR)/* + +html: + $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." + +dirhtml: + $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." 
+ +singlehtml: + $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml + @echo + @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." + +pickle: + $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle + @echo + @echo "Build finished; now you can process the pickle files." + +json: + $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json + @echo + @echo "Build finished; now you can process the JSON files." + +htmlhelp: + $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp + @echo + @echo "Build finished; now you can run HTML Help Workshop with the" \ + ".hhp project file in $(BUILDDIR)/htmlhelp." + +qthelp: + $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp + @echo + @echo "Build finished; now you can run "qcollectiongenerator" with the" \ + ".qhcp project file in $(BUILDDIR)/qthelp, like this:" + @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/PSBodyMeshPackage.qhcp" + @echo "To view the help file:" + @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/PSBodyMeshPackage.qhc" + +devhelp: + $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp + @echo + @echo "Build finished." + @echo "To view the help file:" + @echo "# mkdir -p $$HOME/.local/share/devhelp/PSBodyMeshPackage" + @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/PSBodyMeshPackage" + @echo "# devhelp" + +epub: + $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub + @echo + @echo "Build finished. The epub file is in $(BUILDDIR)/epub." + +latex: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo + @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." + @echo "Run \`make' in that directory to run these through (pdf)latex" \ + "(use \`make latexpdf' here to do that automatically)." + +latexpdf: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo "Running LaTeX files through pdflatex..." + $(MAKE) -C $(BUILDDIR)/latex all-pdf + @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." + +latexpdfja: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo "Running LaTeX files through platex and dvipdfmx..." + $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja + @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." + +text: + $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text + @echo + @echo "Build finished. The text files are in $(BUILDDIR)/text." + +man: + $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man + @echo + @echo "Build finished. The manual pages are in $(BUILDDIR)/man." + +texinfo: + $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo + @echo + @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." + @echo "Run \`make' in that directory to run these through makeinfo" \ + "(use \`make info' here to do that automatically)." + +info: + $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo + @echo "Running Texinfo files through makeinfo..." + make -C $(BUILDDIR)/texinfo info + @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." + +gettext: + $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale + @echo + @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." + +changes: + $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes + @echo + @echo "The overview file is in $(BUILDDIR)/changes." 
+ +linkcheck: + $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck + @echo + @echo "Link check complete; look for any errors in the above output " \ + "or in $(BUILDDIR)/linkcheck/output.txt." + +doctest: + $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest + @echo "Testing of doctests in the sources finished, look at the " \ + "results in $(BUILDDIR)/doctest/output.txt." + +xml: + $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml + @echo + @echo "Build finished. The XML files are in $(BUILDDIR)/xml." + +pseudoxml: + $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml + @echo + @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." diff --git a/mesh-master/doc/make.bat b/mesh-master/doc/make.bat new file mode 100644 index 0000000000000000000000000000000000000000..b850a17378ee0c591d4258806daeb2133d836560 --- /dev/null +++ b/mesh-master/doc/make.bat @@ -0,0 +1,242 @@ +@ECHO OFF + +REM Command file for Sphinx documentation + +if "%SPHINXBUILD%" == "" ( + set SPHINXBUILD=sphinx-build +) +set BUILDDIR=build +set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% source +set I18NSPHINXOPTS=%SPHINXOPTS% source +if NOT "%PAPER%" == "" ( + set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% + set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% +) + +if "%1" == "" goto help + +if "%1" == "help" ( + :help + echo.Please use `make ^` where ^ is one of + echo. html to make standalone HTML files + echo. dirhtml to make HTML files named index.html in directories + echo. singlehtml to make a single large HTML file + echo. pickle to make pickle files + echo. json to make JSON files + echo. htmlhelp to make HTML files and a HTML help project + echo. qthelp to make HTML files and a qthelp project + echo. devhelp to make HTML files and a Devhelp project + echo. epub to make an epub + echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter + echo. text to make text files + echo. man to make manual pages + echo. texinfo to make Texinfo files + echo. gettext to make PO message catalogs + echo. changes to make an overview over all changed/added/deprecated items + echo. xml to make Docutils-native XML files + echo. pseudoxml to make pseudoxml-XML files for display purposes + echo. linkcheck to check all external links for integrity + echo. doctest to run all doctests embedded in the documentation if enabled + goto end +) + +if "%1" == "clean" ( + for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i + del /q /s %BUILDDIR%\* + goto end +) + + +%SPHINXBUILD% 2> nul +if errorlevel 9009 ( + echo. + echo.The 'sphinx-build' command was not found. Make sure you have Sphinx + echo.installed, then set the SPHINXBUILD environment variable to point + echo.to the full path of the 'sphinx-build' executable. Alternatively you + echo.may add the Sphinx directory to PATH. + echo. + echo.If you don't have Sphinx installed, grab it from + echo.http://sphinx-doc.org/ + exit /b 1 +) + +if "%1" == "html" ( + %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The HTML pages are in %BUILDDIR%/html. + goto end +) + +if "%1" == "dirhtml" ( + %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. + goto end +) + +if "%1" == "singlehtml" ( + %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. 
The HTML pages are in %BUILDDIR%/singlehtml. + goto end +) + +if "%1" == "pickle" ( + %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; now you can process the pickle files. + goto end +) + +if "%1" == "json" ( + %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; now you can process the JSON files. + goto end +) + +if "%1" == "htmlhelp" ( + %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; now you can run HTML Help Workshop with the ^ +.hhp project file in %BUILDDIR%/htmlhelp. + goto end +) + +if "%1" == "qthelp" ( + %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; now you can run "qcollectiongenerator" with the ^ +.qhcp project file in %BUILDDIR%/qthelp, like this: + echo.^> qcollectiongenerator %BUILDDIR%\qthelp\PSBodyMeshPackage.qhcp + echo.To view the help file: + echo.^> assistant -collectionFile %BUILDDIR%\qthelp\PSBodyMeshPackage.ghc + goto end +) + +if "%1" == "devhelp" ( + %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. + goto end +) + +if "%1" == "epub" ( + %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The epub file is in %BUILDDIR%/epub. + goto end +) + +if "%1" == "latex" ( + %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. + goto end +) + +if "%1" == "latexpdf" ( + %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex + cd %BUILDDIR%/latex + make all-pdf + cd %BUILDDIR%/.. + echo. + echo.Build finished; the PDF files are in %BUILDDIR%/latex. + goto end +) + +if "%1" == "latexpdfja" ( + %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex + cd %BUILDDIR%/latex + make all-pdf-ja + cd %BUILDDIR%/.. + echo. + echo.Build finished; the PDF files are in %BUILDDIR%/latex. + goto end +) + +if "%1" == "text" ( + %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The text files are in %BUILDDIR%/text. + goto end +) + +if "%1" == "man" ( + %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The manual pages are in %BUILDDIR%/man. + goto end +) + +if "%1" == "texinfo" ( + %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. + goto end +) + +if "%1" == "gettext" ( + %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The message catalogs are in %BUILDDIR%/locale. + goto end +) + +if "%1" == "changes" ( + %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes + if errorlevel 1 exit /b 1 + echo. + echo.The overview file is in %BUILDDIR%/changes. + goto end +) + +if "%1" == "linkcheck" ( + %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck + if errorlevel 1 exit /b 1 + echo. + echo.Link check complete; look for any errors in the above output ^ +or in %BUILDDIR%/linkcheck/output.txt. + goto end +) + +if "%1" == "doctest" ( + %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest + if errorlevel 1 exit /b 1 + echo. 
+ echo.Testing of doctests in the sources finished, look at the ^ +results in %BUILDDIR%/doctest/output.txt. + goto end +) + +if "%1" == "xml" ( + %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The XML files are in %BUILDDIR%/xml. + goto end +) + +if "%1" == "pseudoxml" ( + %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml. + goto end +) + +:end diff --git a/mesh-master/doc/source/conf.py b/mesh-master/doc/source/conf.py new file mode 100644 index 0000000000000000000000000000000000000000..1ff4b9d0c4ff422846fdcf5ac1a5bc6c8ea28a03 --- /dev/null +++ b/mesh-master/doc/source/conf.py @@ -0,0 +1,356 @@ +# -*- coding: utf-8 -*- +# +# PSBody Mesh Package documentation build configuration file, created by +# sphinx-quickstart on Wed Mar 23 07:26:06 2016. +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os +import sphinx_bootstrap_theme + +root_source_folder = os.path.abspath( + os.path.join(os.path.dirname(__file__), + os.pardir, + os.pardir)) +sys.path.insert(0, root_source_folder) + +from mesh.version import __version__ + +print(root_source_folder) +print("version {}".format(__version__)) + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +# sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir))) + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +#needs_sphinx = '1.0' + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + 'sphinx.ext.autodoc', + 'sphinx.ext.intersphinx', + 'sphinx.ext.todo', + 'sphinx.ext.coverage', + 'sphinx.ext.mathjax', + 'sphinx.ext.ifconfig', + 'sphinx.ext.viewcode', + 'sphinx.ext.autosummary', +] + +autoclass_content = 'both' + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# The suffix of source filenames. +source_suffix = '.rst' + +# The encoding of source files. +#source_encoding = 'utf-8-sig' + +# The master toctree document. +master_doc = 'index' + +# General information about the project. +project = u'PSBody Mesh Package' +copyright = u'2016, Max Planck Institute for Intelligent Systems, PSBody group' + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. + +# The short X.Y version +version = __version__ +# The full version, including alpha/beta/rc tags +release = __version__ + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +#language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +#today = '' +# Else, today_fmt is used as the format for a strftime call. 
+#today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = [] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +#default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +#add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +#add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +#show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = 'sphinx' + +# A list of ignored prefixes for module index sorting. +#modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +#keep_warnings = False + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = 'bootstrap' +html_theme_path = sphinx_bootstrap_theme.get_html_theme_path() + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +#html_theme_options = {} + +# Add any paths that contain custom themes here, relative to this directory. +#html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +#html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +#html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +#html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +#html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ['_static'] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +#html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +#html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +#html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +#html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +#html_additional_pages = {} + +# If false, no module index is generated. +#html_domain_indices = True + +# If false, no index is generated. +#html_use_index = True + +# If true, the index is split into individual pages for each letter. +#html_split_index = False + +# If true, links to the reST sources are added to the pages. +#html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +#html_show_sphinx = True + +# If true, "(C) Copyright ..." 
is shown in the HTML footer. Default is True. +#html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +#html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +#html_file_suffix = None + +# Output file base name for HTML help builder. +htmlhelp_basename = 'PSBodyMeshPackagedoc' + + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { +# The paper size ('letterpaper' or 'a4paper'). +#'papersize': 'letterpaper', + +# The font size ('10pt', '11pt' or '12pt'). +#'pointsize': '10pt', + +# Additional stuff for the LaTeX preamble. +#'preamble': '', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ('index', 'PSBodyMeshPackage.tex', u'PSBody Mesh Package Documentation', + u'Max Planck Institute for Intelligent Systems, PSBody group', 'manual'), +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +#latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +#latex_use_parts = False + +# If true, show page references after internal links. +#latex_show_pagerefs = False + +# If true, show URL addresses after external links. +#latex_show_urls = False + +# Documents to append as an appendix to all manuals. +#latex_appendices = [] + +# If false, no module index is generated. +#latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ('index', 'psbodymeshpackage', u'PSBody Mesh Package Documentation', + [u'Max Planck Institute for Intelligent Systems, PSBody group'], 1) +] + +# If true, show URL addresses after external links. +#man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ('index', 'PSBodyMeshPackage', u'PSBody Mesh Package Documentation', + u'Max Planck Institute for Intelligent Systems, PSBody group', 'PSBodyMeshPackage', 'One line description of project.', + 'Miscellaneous'), +] + +# Documents to append as an appendix to all manuals. +#texinfo_appendices = [] + +# If false, no module index is generated. +#texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +#texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +#texinfo_no_detailmenu = False + + +# -- Options for Epub output ---------------------------------------------- + +# Bibliographic Dublin Core info. +epub_title = u'PSBody Mesh Package' +epub_author = u'Max Planck Institute for Intelligent Systems, PSBody group' +epub_publisher = u'Max Planck Institute for Intelligent Systems, PSBody group' +epub_copyright = u'2016, Max Planck Institute for Intelligent Systems, PSBody group' + +# The basename for the epub file. It defaults to the project name. +#epub_basename = u'PSBody Mesh Package' + +# The HTML theme for the epub output. 
+# Since the default themes are not optimized
+# for small screen space, using the same theme for HTML and epub output is
+# usually not wise. This defaults to 'epub', a theme designed to save visual
+# space.
+#epub_theme = 'epub'
+
+# The language of the text. It defaults to the language option
+# or en if the language is not set.
+#epub_language = ''
+
+# The scheme of the identifier. Typical schemes are ISBN or URL.
+#epub_scheme = ''
+
+# The unique identifier of the text. This can be an ISBN number
+# or the project homepage.
+#epub_identifier = ''
+
+# A unique identification for the text.
+#epub_uid = ''
+
+# A tuple containing the cover image and cover page html template filenames.
+#epub_cover = ()
+
+# A sequence of (type, uri, title) tuples for the guide element of content.opf.
+#epub_guide = ()
+
+# HTML files that should be inserted before the pages created by sphinx.
+# The format is a list of tuples containing the path and title.
+#epub_pre_files = []
+
+# HTML files that should be inserted after the pages created by sphinx.
+# The format is a list of tuples containing the path and title.
+#epub_post_files = []
+
+# A list of files that should not be packed into the epub file.
+epub_exclude_files = ['search.html']
+
+# The depth of the table of contents in toc.ncx.
+#epub_tocdepth = 3
+
+# Allow duplicate toc entries.
+#epub_tocdup = True
+
+# Choose between 'default' and 'includehidden'.
+#epub_tocscope = 'default'
+
+# Fix unsupported image types using the PIL.
+#epub_fix_images = False
+
+# Scale large images.
+#epub_max_image_width = 0
+
+# How to display URL addresses: 'footnote', 'no', or 'inline'.
+#epub_show_urls = 'inline'
+
+# If false, no index is generated.
+#epub_use_index = True
+
+
+# Example configuration for intersphinx: refer to the Python standard library.
+intersphinx_mapping = {'http://docs.python.org/': None}
diff --git a/mesh-master/doc/source/index.rst b/mesh-master/doc/source/index.rst
new file mode 100644
index 0000000000000000000000000000000000000000..318bd19782290271a905ef54baad40210be55242
--- /dev/null
+++ b/mesh-master/doc/source/index.rst
@@ -0,0 +1,193 @@
+.. PSBody Mesh Package documentation master file, created by
+   sphinx-quickstart on Wed Mar 23 07:26:06 2016.
+   You can adapt this file completely to your liking, but it should at least
+   contain the root `toctree` directive.
+
+Welcome to PSBody Mesh Package's documentation!
+===============================================
+
+This is the documentation of the Mesh package of the Body Group, Max Planck Institute
+for Intelligent Systems, Tübingen, Germany.
+
+Contents:
+
+.. toctree::
+   :maxdepth: 2
+
+   Mesh <pages/mesh>
+   Mesh Viewer <pages/mesh_viewer>
+   Geometry <pages/geometry>
+
+
+What is this package about?
+===========================
+
+This package contains core functions for manipulating Meshes and visualizing them.
+It requires ``Python 3.5+`` and is supported on Linux and macOS operating systems.
+
+Getting started
+===============
+
+Installation
+------------
+
+You can download the latest release of the ``psbody-mesh`` package
+from the project's `GitHub repository `_.
+To install, first create a dedicated Python virtual
+environment and activate it:
+
+.. code::
+
+   $ python3 -m venv --copies my_venv
+   $ source my_venv/bin/activate
+
+To compile the binary extensions you will need to install the `Boost
+`_ libraries. You can compile your own local
+version or simply do:
+
+.. code::
+
+   $ sudo apt-get install libboost-dev
+
+and then compile and install the ``psbody-mesh`` package using the
+Makefile.
+If you are using the system-wide ``Boost libraries``:
+
+.. code::
+
+   $ make all
+
+or, if the Boost libraries are installed locally:
+
+.. code::
+
+   $ BOOST_INCLUDE_DIRS=/path/to/boost/include make all
+
+Testing
+-------
+
+To run the tests simply do:
+
+.. code::
+
+   $ make tests
+
+Documentation
+-------------
+
+Detailed documentation can be compiled using the Makefile:
+
+.. code::
+
+   $ make documentation
+
+Loading a mesh
+--------------
+
+Loading a :py:class:`Mesh <psbody.mesh.mesh.Mesh>` from a file is as easy as:
+
+.. code::
+
+   from psbody.mesh import Mesh
+   my_mesh = Mesh(filename='mesh_filename.ply')
+
+Rendering a mesh
+----------------
+
+A previously loaded mesh ``my_mesh`` can be visualized inside an interactive window using the
+:py:class:`MeshViewers <psbody.mesh.meshviewer.MeshViewers>` class:
+
+.. code::
+
+   from psbody.mesh import MeshViewers
+
+   # creates a grid of 2x2 mesh viewers
+   mvs = MeshViewers(shape=[2, 2])
+
+   # sets the first (top-left) mesh to my_mesh
+   mvs[0][0].set_static_meshes([my_mesh])
+
+Caching
+-------
+
+Some operations make use of caching for performance reasons. The default folder used for caching is
+
+.. code::
+
+   ~/.psbody/mesh_package_cache
+
+
+If you need to specify the cache folder, define the environment variable ``PSBODY_MESH_CACHE``
+prior to any loading of the Mesh package:
+
+.. code::
+
+   export PSBODY_MESH_CACHE="some/folder"
+   python
+   >>> from psbody.mesh import Mesh
+   # now uses the specified cache
+
+Mesh Viewer
+-----------
+
+``meshviewer`` is a program that allows you to display polygonal meshes produced by the ``mesh`` package.
+
+Viewing a mesh on a local machine
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The most straightforward use case is viewing a mesh on the same machine where it is stored. To do this simply run:
+
+.. code::
+
+   $ meshviewer view sphere.obj
+
+This will create an interactive window with your mesh rendering. You can render more than one mesh in the same window by passing several paths to the ``view`` command:
+
+.. code::
+
+   $ meshviewer view sphere.obj cylinder.obj
+
+This will arrange the subplots horizontally in a row. If you want a grid arrangement, you can specify the grid parameters explicitly:
+
+.. code::
+
+   $ meshviewer view -nx 2 -ny 2 *.obj
+
+Viewing a mesh from a remote machine
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+It is also possible to view a mesh stored on a remote machine. To do this you need mesh to be installed on both the local and the remote machine. You start by opening an empty viewer window that listens on a network port:
+
+.. code::
+
+   (local) $ meshviewer open --port 3000
+
+To stream a shape to this viewer you have to either pick a port that is visible from the remote machine or manually expose the port when connecting, for example through SSH port forwarding:
+
+.. code::
+
+   (local) $ ssh -R 3000:127.0.0.1:3000 user@host
+
+Then, on the remote machine, you use the ``view`` command pointing to the locally forwarded port:
+
+.. code::
+
+   (remote) $ meshviewer view -p 3000 sphere.obj
+
+This should display the remote mesh in your local viewer. If it does not, the network connection may have been closed before the mesh could be sent. To work around this, try increasing the timeout, for example to 1 second:
+
+.. code::
+
+   (remote) $ meshviewer view -p 3000 --timeout 1 sphere.obj
+
+To take a snapshot you should locally run the ``snap`` command:
+
+.. code::
+
+   (local) $ meshviewer snap -p 3000 sphere.png
+
+Indices and tables
+==================
+
+* :ref:`genindex`
+* :ref:`modindex`
+* :ref:`search`
diff --git a/mesh-master/doc/source/pages/geometry.rst b/mesh-master/doc/source/pages/geometry.rst
new file mode 100644
index 0000000000000000000000000000000000000000..cbd367e4ef17eeb4394e0d085e3086b5464a42bf
--- /dev/null
+++ b/mesh-master/doc/source/pages/geometry.rst
@@ -0,0 +1,29 @@
+Geometry
+========
+
+The geometry subpackage contains utilities for working with geometric entities.
+
+.. autosummary::
+
+   psbody.mesh.geometry.triangle_area.triangle_area
+   psbody.mesh.geometry.rodrigues.rodrigues
+   psbody.mesh.geometry.barycentric_coordinates_of_projection.barycentric_coordinates_of_projection
+   psbody.mesh.geometry.cross_product.CrossProduct
+
+
+Reference
+---------
+
+.. automodule:: psbody.mesh.geometry.triangle_area
+   :members:
+
+.. automodule:: psbody.mesh.geometry.rodrigues
+   :members:
+
+.. automodule:: psbody.mesh.geometry.barycentric_coordinates_of_projection
+   :members:
+
+.. automodule:: psbody.mesh.geometry.cross_product
+   :members:
+
+
\ No newline at end of file
diff --git a/mesh-master/doc/source/pages/mesh.rst b/mesh-master/doc/source/pages/mesh.rst
new file mode 100644
index 0000000000000000000000000000000000000000..3597756317f3f4a8e6da885482f178c6ed853392
--- /dev/null
+++ b/mesh-master/doc/source/pages/mesh.rst
@@ -0,0 +1,31 @@
+Mesh
+====
+
+The :py:class:`Mesh <psbody.mesh.mesh.Mesh>` class is the core class of the Mesh package.
+
+Loading and Saving a mesh
+-------------------------
+
+The entry point for loading a mesh is :py:func:`Mesh.load_from_file <psbody.mesh.mesh.Mesh.load_from_file>`.
+
+Supported file formats:
+
+* ply
+* obj
+
+Querying meshes
+---------------
+Meshes contain functions to query their content efficiently.
+
+Transforming meshes
+-------------------
+
+
+Reference
+---------
+
+.. automodule:: psbody.mesh.mesh
+   :members:
+   :undoc-members:
+
+
diff --git a/mesh-master/doc/source/pages/mesh_viewer.rst b/mesh-master/doc/source/pages/mesh_viewer.rst
new file mode 100644
index 0000000000000000000000000000000000000000..6e510574da4e15082d912512c77ba883bd0f1018
--- /dev/null
+++ b/mesh-master/doc/source/pages/mesh_viewer.rst
@@ -0,0 +1,11 @@
+Mesh viewer
+===========
+
+
+Reference
+---------
+
+.. automodule:: psbody.mesh.meshviewer
+   :members:
+   :undoc-members:
+
diff --git a/mesh-master/mesh/CMakeLists.txt b/mesh-master/mesh/CMakeLists.txt
new file mode 100644
index 0000000000000000000000000000000000000000..2c70f4ab3a565af6b718e41f03396af4e794d7b1
--- /dev/null
+++ b/mesh-master/mesh/CMakeLists.txt
@@ -0,0 +1,99 @@
+# Copyright 2016, Max Planck Society.
+# Not licensed +# author Raffi Enficiaud + +cmake_minimum_required (VERSION 2.8.12) +set_property(GLOBAL PROPERTY USE_FOLDERS ON) + +enable_testing() + +project(Mesh) + +add_custom_target(cmake_files SOURCES + cmake/python_helper.cmake + cmake/thirdparty.cmake + src/hijack_python_headers.hpp) + +include(cmake/thirdparty.cmake) +include(cmake/python_helper.cmake) + + +include_directories(${PYTHON_INCLUDE_PATH}) + +set(MESHPYTHON_SRC + __init__.py + mesh.py + meshviewer.py + colors.py + search.py + ) + +# for convenience +add_custom_target(MeshPython SOURCES ${MESHPYTHON_SRC}) + + + + +# aabb_normals +# CGAL_NDEBUG removes calls to logging, warning and error functions that would need +# a link to the CGAL libraries +# CGAL_HAS_NO_THREADS removes the thread safety of the AABB tree with the advantage +# of removing the dependency to boost::thread (and indirectly boost::system compiled library) +# MESH_CGAL_AVOID_COMPILED_VERSION is a define of our own in order to hack a remaining +# log of error without the need to include CGAL generated libraries +# CGAL_NO_AUTOLINK_CGA prevents autolinking on Visual + +set(DEFINES_MESH_EXTENSIONS_WITH_CGAL_WITHOUT_LINK + -DCGAL_NDEBUG=1 + -DMESH_CGAL_AVOID_COMPILED_VERSION=1 + -DCGAL_HAS_NO_THREADS=1 + -DCGAL_NO_AUTOLINK_CGAL=1) + +python_add_library(TARGET aabb_normals SOURCES + src/cgal_error_emulation.hpp + src/AABB_n_tree.h + src/aabb_normals.cpp) +set_property(TARGET aabb_normals PROPERTY FOLDER "GeometryExt/") +target_include_directories(aabb_normals PRIVATE + ${libcgalroot}/include + ${NUMPY_INCLUDE_PATH} + ${Boost_INCLUDE_DIRS}) +target_compile_definitions(aabb_normals PRIVATE + ${DEFINES_MESH_EXTENSIONS_WITH_CGAL_WITHOUT_LINK}) + + +# spatial search +python_add_library(TARGET spatialsearch SOURCES + src/cgal_error_emulation.hpp + src/nearest_point_triangle_3.h + src/nearest_triangle_normals.hpp + src/nearest_triangle.hpp + + src/spatialsearchmodule.cpp +) +set_property(TARGET spatialsearch PROPERTY FOLDER "GeometryExt/") +target_include_directories(spatialsearch PRIVATE + ${libcgalroot}/include + ${NUMPY_INCLUDE_PATH} + ${Boost_INCLUDE_DIRS}) +target_compile_definitions(spatialsearch PRIVATE + ${DEFINES_MESH_EXTENSIONS_WITH_CGAL_WITHOUT_LINK}) + + + +# serialization extensions + +# plyutils +python_add_library(TARGET plyutils SOURCES + src/plyutils.h + src/plyutils.c + src/rply.h + src/rply.c) +set_property(TARGET plyutils PROPERTY FOLDER "SerializationExt/") + +# loadobj +python_add_library(TARGET loadobj SOURCES src/py_loadobj.cpp) +target_include_directories(loadobj PRIVATE + ${NUMPY_INCLUDE_PATH} + ${Boost_INCLUDE_DIRS}) +set_property(TARGET loadobj PROPERTY FOLDER "SerializationExt/") diff --git a/mesh-master/mesh/__init__.py b/mesh-master/mesh/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..474bd5e0925d8939ecaf632634be30261e3a69c4 --- /dev/null +++ b/mesh-master/mesh/__init__.py @@ -0,0 +1,20 @@ +#!/usr/bin/env python +# encoding: utf-8 + +# Copyright (c) 2013 Max Planck Society. All rights reserved. 
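+#
+# Package initialisation: re-exports the Mesh and MeshViewer/MeshViewers classes
+# at the package level and prepares the on-disk cache folder used by the package.
+# The cache location can be redirected before the first import, e.g.
+#
+#     PSBODY_MESH_CACHE=/tmp/mesh_cache python -c "from psbody.mesh import Mesh"
+#
+# (the folder above is only an illustration; any writable path works).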
+ +import os +from os.path import abspath, dirname, expanduser, join + +from .mesh import Mesh +from .meshviewer import MeshViewer, MeshViewers + +texture_path = abspath(join(dirname(__file__), '..', 'data', 'template', 'texture_coordinates')) + +if 'PSBODY_MESH_CACHE' in os.environ: + mesh_package_cache_folder = expanduser(os.environ['PSBODY_MESH_CACHE']) +else: + mesh_package_cache_folder = expanduser('~/.psbody/mesh_package_cache') + +if not os.path.exists(mesh_package_cache_folder): + os.makedirs(mesh_package_cache_folder) diff --git a/mesh-master/mesh/arcball.py b/mesh-master/mesh/arcball.py new file mode 100644 index 0000000000000000000000000000000000000000..a266e847801da2de2ab3f6c2d664b6300409e2ef --- /dev/null +++ b/mesh-master/mesh/arcball.py @@ -0,0 +1,247 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Copyright (c) 2012 Max Planck Society. All rights reserved. + +""" +Math utilities, vector, matrix types and ArcBall quaternion rotation class +========================================================================== + +""" + +import numpy as Numeric +import copy +from math import sqrt + +# //assuming IEEE-754(GLfloat), which i believe has max precision of 7 bits +Epsilon = 1.0e-5 + + +class ArcBallT(object): + def __init__(self, NewWidth, NewHeight): + self.m_StVec = Vector3fT() + self.m_EnVec = Vector3fT() + self.m_AdjustWidth = 1.0 + self.m_AdjustHeight = 1.0 + self.setBounds(NewWidth, NewHeight) + + def __str__(self): + str_rep = "" + str_rep += "StVec = " + str(self.m_StVec) + str_rep += "\nEnVec = " + str(self.m_EnVec) + str_rep += "\n scale coords %f %f" % (self.m_AdjustWidth, self.m_AdjustHeight) + return str_rep + + def setBounds(self, NewWidth, NewHeight): + # //Set new bounds + assert (NewWidth > 1.0 and NewHeight > 1.0), "Invalid width or height for bounds." + # //Set adjustment factor for width/height + self.m_AdjustWidth = 1.0 / ((NewWidth - 1.0) * 0.5) + self.m_AdjustHeight = 1.0 / ((NewHeight - 1.0) * 0.5) + + def _mapToSphere(self, NewPt): + # Given a new window coordinate, will modify NewVec in place + X = 0 + Y = 1 + Z = 2 + + NewVec = Vector3fT() + # //Copy paramter into temp point + TempPt = copy.copy(NewPt) + # //Adjust point coords and scale down to range of [-1 ... 1] + TempPt[X] = (NewPt[X] * self.m_AdjustWidth) - 1.0 + TempPt[Y] = 1.0 - (NewPt[Y] * self.m_AdjustHeight) + # //Compute the square of the length of the vector to the point from the center + length = Numeric.sum(Numeric.dot(TempPt, TempPt)) + # //If the point is mapped outside of the sphere... 
(length > radius squared) + if (length > 1.0): + # //Compute a normalizing factor (radius / sqrt(length)) + norm = 1.0 / sqrt(length) + + # //Return the "normalized" vector, a point on the sphere + NewVec[X] = TempPt[X] * norm + NewVec[Y] = TempPt[Y] * norm + NewVec[Z] = 0.0 + else: # //Else it's on the inside + # //Return a vector to a point mapped inside the sphere sqrt(radius squared - length) + NewVec[X] = TempPt[X] + NewVec[Y] = TempPt[Y] + NewVec[Z] = sqrt(1.0 - length) + + return NewVec + + def click(self, NewPt): + # Mouse down (Point2fT + self.m_StVec = self._mapToSphere(NewPt) + return + + def drag(self, NewPt): + # Mouse drag, calculate rotation (Point2fT Quat4fT) + """ drag (Point2fT mouse_coord) -> new_quaternion_rotation_vec + """ + X = 0 + Y = 1 + Z = 2 + W = 3 + + self.m_EnVec = self._mapToSphere(NewPt) + + # //Compute the vector perpendicular to the begin and end vectors + # Perp = Vector3fT () + Perp = Vector3fCross(self.m_StVec, self.m_EnVec) + + NewRot = Quat4fT() + # Compute the length of the perpendicular vector + if (Vector3fLength(Perp) > Epsilon): + # if its non-zero + # We're ok, so return the perpendicular vector as the transform after all + NewRot[X] = Perp[X] + NewRot[Y] = Perp[Y] + NewRot[Z] = Perp[Z] + # //In the quaternion values, w is cosine (theta / 2), where theta is rotation angle + NewRot[W] = Vector3fDot(self.m_StVec, self.m_EnVec) + else: + # if its zero + # The begin and end vectors coincide, so return a quaternion of zero matrix (no rotation) + NewRot[X] = NewRot[Y] = NewRot[Z] = NewRot[W] = 0.0 + + return NewRot + + +def Matrix4fT(): + return Numeric.identity(4, 'f') + + +def Matrix3fT(): + return Numeric.identity(3, 'f') + + +def Quat4fT(): + return Numeric.zeros(4, 'f') + + +def Vector3fT(): + return Numeric.zeros(3, 'f') + + +def Point2fT(x=0.0, y=0.0): + pt = Numeric.zeros(2, 'f') + pt[0] = x + pt[1] = y + return pt + + +def Vector3fDot(u, v): + # Dot product of two 3f vectors + dotprod = Numeric.dot(u, v) + return dotprod + + +def Vector3fCross(u, v): + # Cross product of two 3f vectors + X = 0 + Y = 1 + Z = 2 + cross = Numeric.zeros(3, 'f') + cross[X] = (u[Y] * v[Z]) - (u[Z] * v[Y]) + cross[Y] = (u[Z] * v[X]) - (u[X] * v[Z]) + cross[Z] = (u[X] * v[Y]) - (u[Y] * v[X]) + return cross + + +def Vector3fLength(u): + mag_squared = Numeric.sum(Numeric.dot(u, u)) + mag = sqrt(mag_squared) + return mag + + +def Matrix3fSetIdentity(): + return Numeric.identity(3, 'f') + + +def Matrix3fMulMatrix3f(matrix_a, matrix_b): + return matrix_a.dot(matrix_b) + + +def Matrix4fSVD(NewObj): + X = 0 + Y = 1 + Z = 2 + s = sqrt(((NewObj[X][X] * NewObj[X][X]) + (NewObj[X][Y] * NewObj[X][Y]) + (NewObj[X][Z] * NewObj[X][Z]) + + (NewObj[Y][X] * NewObj[Y][X]) + (NewObj[Y][Y] * NewObj[Y][Y]) + (NewObj[Y][Z] * NewObj[Y][Z]) + + (NewObj[Z][X] * NewObj[Z][X]) + (NewObj[Z][Y] * NewObj[Z][Y]) + (NewObj[Z][Z] * NewObj[Z][Z])) / 3.0) + return s + + +def Matrix4fSetRotationScaleFromMatrix3f(NewObj, three_by_three_matrix): + """Modifies NewObj in-place by replacing its upper 3x3 portion from the + passed in 3x3 matrix. 
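+    The fourth row and column of NewObj (the translation and homogeneous
+    components) are left untouched.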
+ + :param NewObj: a `Matrix4fT` + """ + NewObj[0:3, 0:3] = three_by_three_matrix + return NewObj + + +def Matrix4fSetRotationFromMatrix3f(NewObj, three_by_three_matrix): + """ + Sets the rotational component (upper 3x3) of this matrix to the matrix + values in the T precision Matrix3d argument; the other elements of + this matrix are unchanged; a singular value decomposition is performed + on this object's upper 3x3 matrix to factor out the scale, then this + object's upper 3x3 matrix components are replaced by the passed rotation + components, and then the scale is reapplied to the rotational + components. + + :param three_by_three_matrix: T precision 3x3 matrix + """ + scale = Matrix4fSVD(NewObj) + + NewObj = Matrix4fSetRotationScaleFromMatrix3f(NewObj, three_by_three_matrix) + scaled_NewObj = NewObj * scale # Matrix4fMulRotationScale(NewObj, scale); + return scaled_NewObj + + +def Matrix3fSetRotationFromQuat4f(q1): + """Converts the H quaternion q1 into a new equivalent 3x3 rotation matrix.""" + X = 0 + Y = 1 + Z = 2 + W = 3 + + NewObj = Matrix3fT() + n = Numeric.sum(Numeric.dot(q1, q1)) + s = 0.0 + if (n > 0.0): + s = 2.0 / n + + xs = q1[X] * s + ys = q1[Y] * s + zs = q1[Z] * s + + wx = q1[W] * xs + wy = q1[W] * ys + wz = q1[W] * zs + + xx = q1[X] * xs + xy = q1[X] * ys + xz = q1[X] * zs + + yy = q1[Y] * ys + yz = q1[Y] * zs + zz = q1[Z] * zs + + # This math all comes about by way of algebra, complex math, and trig identities. + # See Lengyel pages 88-92 + NewObj[X][X] = 1.0 - (yy + zz) + NewObj[Y][X] = xy - wz + NewObj[Z][X] = xz + wy + + NewObj[X][Y] = xy + wz + NewObj[Y][Y] = 1.0 - (xx + zz) + NewObj[Z][Y] = yz - wx + + NewObj[X][Z] = xz - wy + NewObj[Y][Z] = yz + wx + NewObj[Z][Z] = 1.0 - (xx + yy) + + return NewObj diff --git a/mesh-master/mesh/cmake/python_helper.cmake b/mesh-master/mesh/cmake/python_helper.cmake new file mode 100644 index 0000000000000000000000000000000000000000..728d7b150bc3fbc5fcf8461570aac440eca6812a --- /dev/null +++ b/mesh-master/mesh/cmake/python_helper.cmake @@ -0,0 +1,74 @@ +# Copyright 2016, Max Planck Society. +# Not licensed +# author Raffi Enficiaud + +# helper file containing commands easing the declaration of python modules + +include(CMakeParseArguments) + + +#.rst: +# .. command:: python_add_library +# +# Adds a shared library that is meant to be a python extension module. +# The added library links to python library and has the proper extension. +# +# :: +# +# python_add_library( +# TARGET targetname +# SOURCES source_list) +# +# ``targetname`` name of the python extension +# ``source_list`` list of source files for this target. 
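+#
+# A minimal usage sketch (the target and source names below are purely
+# illustrative; see mesh/CMakeLists.txt for the real targets):
+#
+#   python_add_library(
+#     TARGET my_extension
+#     SOURCES src/my_extension.cpp)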
+# +function(python_add_library) + + if("${PYTHON_MODULES_EXTENSIONS}" STREQUAL "") + if("${PYTHON_VERSION}" VERSION_GREATER "2.5") + if(UNIX OR MINGW) + set(PYTHON_MODULES_EXTENSIONS_TEMP ".so") + else() + set(PYTHON_MODULES_EXTENSIONS_TEMP ".pyd") + endif() + else() + if(APPLE) + set(PYTHON_MODULES_EXTENSIONS_TEMP ".so") + else() + set(PYTHON_MODULES_EXTENSIONS_TEMP ${CMAKE_SHARED_LIBRARY_SUFFIX}) + endif() + endif() + set(PYTHON_MODULES_EXTENSIONS ${PYTHON_MODULES_EXTENSIONS_TEMP} CACHE STRING "Python modules extension for the current platform") + endif() + + + set(options ) + set(oneValueArgs TARGET) + set(multiValueArgs SOURCES) + cmake_parse_arguments(local_python_add_cmd "${options}" "${oneValueArgs}" "${multiValueArgs}" ${ARGN}) + + if("${local_python_add_cmd_TARGET}" STREQUAL "") + message(FATAL_ERROR "python_add_library: the TARGET should be specified") + endif() + + if("${local_python_add_cmd_SOURCES}" STREQUAL "") + message(FATAL_ERROR "python_add_library: at least one source file should be specified") + endif() + + add_library(${local_python_add_cmd_TARGET} SHARED + src/hijack_python_headers.hpp # by default + "${local_python_add_cmd_SOURCES}") + target_include_directories(${local_python_add_cmd_TARGET} PRIVATE ${PYTHON_INCLUDE_PATH}) + target_link_libraries(${local_python_add_cmd_TARGET} ${PYTHON_LIBRARY}) # PYTHON_LIBRARIES may contain the debug version of python, which we do not want + set_target_properties(${local_python_add_cmd_TARGET} + PROPERTIES SUFFIX ${PYTHON_MODULES_EXTENSIONS} + PREFIX "") + + if(FALSE) + set_target_properties(${local_python_add_cmd_TARGET} + PROPERTIES + OUTPUT_NAME_DEBUG ${local_python_add_cmd_TARGET}_d + ) + endif() + +endfunction(python_add_library) \ No newline at end of file diff --git a/mesh-master/mesh/cmake/thirdparty.cmake b/mesh-master/mesh/cmake/thirdparty.cmake new file mode 100644 index 0000000000000000000000000000000000000000..e5ae1ed25bd3d68f068b205b3be27effa476b45e --- /dev/null +++ b/mesh-master/mesh/cmake/thirdparty.cmake @@ -0,0 +1,107 @@ +# Copyright 2016, Max Planck Society. 
+# Not licensed
+# author Raffi Enficiaud
+
+# this file contains everything necessary to link against the thirdparty libraries
+
+# location of the stored thirdparty archives
+set(thirdparty_dir ${CMAKE_SOURCE_DIR}/thirdparty)
+
+# the location where the archives will be deflated, to avoid pollution
+# of the source tree
+set(thirdparties_deflate_directory ${CMAKE_BINARY_DIR}/external_libs_deflate)
+if(NOT EXISTS ${thirdparties_deflate_directory})
+  file(MAKE_DIRECTORY ${thirdparties_deflate_directory})
+endif()
+
+# this module may be used to find system installed libraries on Linux
+if(UNIX AND NOT APPLE)
+  find_package(PkgConfig)
+endif()
+
+
+# Python libraries, required
+find_package(PythonLibs REQUIRED)
+find_package(PythonInterp REQUIRED)
+
+# numpy
+execute_process(
+  COMMAND ${PYTHON_EXECUTABLE} -c "import numpy; print(numpy.get_include())"
+  OUTPUT_VARIABLE NUMPY_INCLUDE_PATH
+  ERROR_VARIABLE NUMPY_ERROR
+  OUTPUT_STRIP_TRAILING_WHITESPACE
+  )
+if(NOT (NUMPY_ERROR STREQUAL "") OR (NUMPY_INCLUDE_PATH STREQUAL ""))
+  message(FATAL_ERROR "[numpy] the following error occurred: ${NUMPY_ERROR} - Consider setting PYTHON_ROOT in the environment")
+endif()
+message(STATUS "[numpy] found headers in ${NUMPY_INCLUDE_PATH}")
+
+
+# cgal
+set(CGAL_MAJOR_VERSION 4)
+set(CGAL_MINOR_VERSION 7)
+set(CGAL_VERSION ${CGAL_MAJOR_VERSION}.${CGAL_MINOR_VERSION})
+set(LIBCGAL CGAL-${CGAL_MAJOR_VERSION}.${CGAL_MINOR_VERSION})
+set(libcgalroot ${thirdparties_deflate_directory}/${LIBCGAL})
+
+if(NOT EXISTS ${libcgalroot})
+  message(STATUS "Untarring ${LIBCGAL}")
+  execute_process(
+    COMMAND ${CMAKE_COMMAND} -E tar xzf ${thirdparty_dir}/${LIBCGAL}.tar.gz
+    WORKING_DIRECTORY ${thirdparties_deflate_directory})
+endif()
+
+if(NOT EXISTS ${libcgalroot}/include/CGAL/compiler_config.h)
+  message(STATUS "[CGAL] creating empty configuration file for header only compilation")
+  file(WRITE
+    ${libcgalroot}/include/CGAL/compiler_config.h
+    "// automatically generated by mesh cmake")
+endif()
+
+
+
+# boost (needed by CGAL)
+if(NOT BOOST_ROOT)
+  message(STATUS "[BOOST] Boost root not configured, taking the system boost version")
+  set(MESH_BOOST_FROM_SYSTEM TRUE)
+else()
+  message(STATUS "[BOOST] Boost root directory set to ${BOOST_ROOT}")
+  set(MESH_BOOST_FROM_SYSTEM FALSE)
+endif()
+
+if(UNIX AND NOT APPLE AND NOT MESH_BOOST_FROM_SYSTEM)
+  message(WARNING "[BOOST] you are setting a different boost than the one provided by the system.
This option should be taken with care.") +endif() + +set(Boost_ADDITIONAL_VERSIONS + "1.54" "1.54.0" "1.55" "1.55.0" "1.56" "1.56.0" "1.57.0" "1.58" "1.58.0" "1.59" "1.59.0" + "1.60" "1.60.0" "1.61" "1.61.0") + + +add_definitions(-DBOOST_ALL_NO_LIB) # disable auto link +set(Boost_USE_STATIC_LIBS OFF) # linking with static library version (not used because of the header only) +if(NOT Boost_USE_STATIC_LIBS) + # link against dynamic libraries + add_definitions(-DBOOST_ALL_DYN_LINK) +endif() + +# if we are using the system version, we do not want to have the exact version embedded in the rpath/ldd +if(MESH_BOOST_FROM_SYSTEM) + set(Boost_REALPATH OFF) +else() + set(Boost_REALPATH ON) +endif() + +set(Boost_USE_MULTITHREADED ON) +set(Boost_DEBUG ON) +set(Boost_DETAILED_FAILURE_MSG ON) +if(DEFINED BOOST_ROOT) + set(Boost_NO_SYSTEM_PATHS ON) +else() + set(Boost_NO_SYSTEM_PATHS OFF) +endif() +set(Boost_NO_BOOST_CMAKE ON) + +# only the header locations +find_package(Boost 1.59) + diff --git a/mesh-master/mesh/colors.py b/mesh-master/mesh/colors.py new file mode 100644 index 0000000000000000000000000000000000000000..534d68766ded3e1d4c5f06e7ba19a485954907bd --- /dev/null +++ b/mesh-master/mesh/colors.py @@ -0,0 +1,790 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Copyright (c) 2012 Max Planck Society. All rights reserved. +# Created by Matthew Loper on 2012-05-12. + +""" +Colors utilities +================ + + +""" + +import re +import numpy as np + + +def main(): + """Generates code for name_to_rgb dict, assuming an rgb.txt file available (in X11 format).""" + with open('rgb.txt') as fp: + + line = fp.readline() + while line: + reg = re.match('\s*(\d+)\s*(\d+)\s*(\d+)\s*(\w.*\w).*', line) + if reg: + r = int(reg.group(1)) / 255. + g = int(reg.group(2)) / 255. + b = int(reg.group(3)) / 255. 
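+                # groups 1-3 of the regex are the 8-bit R, G, B components
+                # (scaled to [0, 1] above); group 4 is the X11 color name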
+ d = reg.group(4) + print("'%s': np.array([%.2f, %.2f, %.2f])," % (d, r, g, b)) + line = fp.readline() + + +name_to_rgb = { + 'snow': np.array([1.00, 0.98, 0.98]), + 'ghost white': np.array([0.97, 0.97, 1.00]), + 'GhostWhite': np.array([0.97, 0.97, 1.00]), + 'white smoke': np.array([0.96, 0.96, 0.96]), + 'WhiteSmoke': np.array([0.96, 0.96, 0.96]), + 'gainsboro': np.array([0.86, 0.86, 0.86]), + 'floral white': np.array([1.00, 0.98, 0.94]), + 'FloralWhite': np.array([1.00, 0.98, 0.94]), + 'old lace': np.array([0.99, 0.96, 0.90]), + 'OldLace': np.array([0.99, 0.96, 0.90]), + 'linen': np.array([0.98, 0.94, 0.90]), + 'antique white': np.array([0.98, 0.92, 0.84]), + 'AntiqueWhite': np.array([0.98, 0.92, 0.84]), + 'papaya whip': np.array([1.00, 0.94, 0.84]), + 'PapayaWhip': np.array([1.00, 0.94, 0.84]), + 'blanched almond': np.array([1.00, 0.92, 0.80]), + 'BlanchedAlmond': np.array([1.00, 0.92, 0.80]), + 'bisque': np.array([1.00, 0.89, 0.77]), + 'peach puff': np.array([1.00, 0.85, 0.73]), + 'PeachPuff': np.array([1.00, 0.85, 0.73]), + 'navajo white': np.array([1.00, 0.87, 0.68]), + 'NavajoWhite': np.array([1.00, 0.87, 0.68]), + 'moccasin': np.array([1.00, 0.89, 0.71]), + 'cornsilk': np.array([1.00, 0.97, 0.86]), + 'ivory': np.array([1.00, 1.00, 0.94]), + 'lemon chiffon': np.array([1.00, 0.98, 0.80]), + 'LemonChiffon': np.array([1.00, 0.98, 0.80]), + 'seashell': np.array([1.00, 0.96, 0.93]), + 'honeydew': np.array([0.94, 1.00, 0.94]), + 'mint cream': np.array([0.96, 1.00, 0.98]), + 'MintCream': np.array([0.96, 1.00, 0.98]), + 'azure': np.array([0.94, 1.00, 1.00]), + 'alice blue': np.array([0.94, 0.97, 1.00]), + 'AliceBlue': np.array([0.94, 0.97, 1.00]), + 'lavender': np.array([0.90, 0.90, 0.98]), + 'lavender blush': np.array([1.00, 0.94, 0.96]), + 'LavenderBlush': np.array([1.00, 0.94, 0.96]), + 'misty rose': np.array([1.00, 0.89, 0.88]), + 'MistyRose': np.array([1.00, 0.89, 0.88]), + 'white': np.array([1.00, 1.00, 1.00]), + 'black': np.array([0.00, 0.00, 0.00]), + 'dark slate gray': np.array([0.18, 0.31, 0.31]), + 'DarkSlateGray': np.array([0.18, 0.31, 0.31]), + 'dark slate grey': np.array([0.18, 0.31, 0.31]), + 'DarkSlateGrey': np.array([0.18, 0.31, 0.31]), + 'dim gray': np.array([0.41, 0.41, 0.41]), + 'DimGray': np.array([0.41, 0.41, 0.41]), + 'dim grey': np.array([0.41, 0.41, 0.41]), + 'DimGrey': np.array([0.41, 0.41, 0.41]), + 'slate gray': np.array([0.44, 0.50, 0.56]), + 'SlateGray': np.array([0.44, 0.50, 0.56]), + 'slate grey': np.array([0.44, 0.50, 0.56]), + 'SlateGrey': np.array([0.44, 0.50, 0.56]), + 'light slate gray': np.array([0.47, 0.53, 0.60]), + 'LightSlateGray': np.array([0.47, 0.53, 0.60]), + 'light slate grey': np.array([0.47, 0.53, 0.60]), + 'LightSlateGrey': np.array([0.47, 0.53, 0.60]), + 'gray': np.array([0.75, 0.75, 0.75]), + 'grey': np.array([0.75, 0.75, 0.75]), + 'light grey': np.array([0.83, 0.83, 0.83]), + 'LightGrey': np.array([0.83, 0.83, 0.83]), + 'light gray': np.array([0.83, 0.83, 0.83]), + 'LightGray': np.array([0.83, 0.83, 0.83]), + 'midnight blue': np.array([0.10, 0.10, 0.44]), + 'MidnightBlue': np.array([0.10, 0.10, 0.44]), + 'navy': np.array([0.00, 0.00, 0.50]), + 'navy blue': np.array([0.00, 0.00, 0.50]), + 'NavyBlue': np.array([0.00, 0.00, 0.50]), + 'cornflower blue': np.array([0.39, 0.58, 0.93]), + 'CornflowerBlue': np.array([0.39, 0.58, 0.93]), + 'dark slate blue': np.array([0.28, 0.24, 0.55]), + 'DarkSlateBlue': np.array([0.28, 0.24, 0.55]), + 'slate blue': np.array([0.42, 0.35, 0.80]), + 'SlateBlue': np.array([0.42, 0.35, 0.80]), + 'medium slate blue': 
np.array([0.48, 0.41, 0.93]), + 'MediumSlateBlue': np.array([0.48, 0.41, 0.93]), + 'light slate blue': np.array([0.52, 0.44, 1.00]), + 'LightSlateBlue': np.array([0.52, 0.44, 1.00]), + 'medium blue': np.array([0.00, 0.00, 0.80]), + 'MediumBlue': np.array([0.00, 0.00, 0.80]), + 'royal blue': np.array([0.25, 0.41, 0.88]), + 'RoyalBlue': np.array([0.25, 0.41, 0.88]), + 'blue': np.array([0.00, 0.00, 1.00]), + 'dodger blue': np.array([0.12, 0.56, 1.00]), + 'DodgerBlue': np.array([0.12, 0.56, 1.00]), + 'deep sky blue': np.array([0.00, 0.75, 1.00]), + 'DeepSkyBlue': np.array([0.00, 0.75, 1.00]), + 'sky blue': np.array([0.53, 0.81, 0.92]), + 'SkyBlue': np.array([0.53, 0.81, 0.92]), + 'light sky blue': np.array([0.53, 0.81, 0.98]), + 'LightSkyBlue': np.array([0.53, 0.81, 0.98]), + 'steel blue': np.array([0.27, 0.51, 0.71]), + 'SteelBlue': np.array([0.27, 0.51, 0.71]), + 'light steel blue': np.array([0.69, 0.77, 0.87]), + 'LightSteelBlue': np.array([0.69, 0.77, 0.87]), + 'light blue': np.array([0.68, 0.85, 0.90]), + 'LightBlue': np.array([0.68, 0.85, 0.90]), + 'powder blue': np.array([0.69, 0.88, 0.90]), + 'PowderBlue': np.array([0.69, 0.88, 0.90]), + 'pale turquoise': np.array([0.69, 0.93, 0.93]), + 'PaleTurquoise': np.array([0.69, 0.93, 0.93]), + 'dark turquoise': np.array([0.00, 0.81, 0.82]), + 'DarkTurquoise': np.array([0.00, 0.81, 0.82]), + 'medium turquoise': np.array([0.28, 0.82, 0.80]), + 'MediumTurquoise': np.array([0.28, 0.82, 0.80]), + 'turquoise': np.array([0.25, 0.88, 0.82]), + 'cyan': np.array([0.00, 1.00, 1.00]), + 'light cyan': np.array([0.88, 1.00, 1.00]), + 'LightCyan': np.array([0.88, 1.00, 1.00]), + 'cadet blue': np.array([0.37, 0.62, 0.63]), + 'CadetBlue': np.array([0.37, 0.62, 0.63]), + 'medium aquamarine': np.array([0.40, 0.80, 0.67]), + 'MediumAquamarine': np.array([0.40, 0.80, 0.67]), + 'aquamarine': np.array([0.50, 1.00, 0.83]), + 'dark green': np.array([0.00, 0.39, 0.00]), + 'DarkGreen': np.array([0.00, 0.39, 0.00]), + 'dark olive green': np.array([0.33, 0.42, 0.18]), + 'DarkOliveGreen': np.array([0.33, 0.42, 0.18]), + 'dark sea green': np.array([0.56, 0.74, 0.56]), + 'DarkSeaGreen': np.array([0.56, 0.74, 0.56]), + 'sea green': np.array([0.18, 0.55, 0.34]), + 'SeaGreen': np.array([0.18, 0.55, 0.34]), + 'medium sea green': np.array([0.24, 0.70, 0.44]), + 'MediumSeaGreen': np.array([0.24, 0.70, 0.44]), + 'light sea green': np.array([0.13, 0.70, 0.67]), + 'LightSeaGreen': np.array([0.13, 0.70, 0.67]), + 'pale green': np.array([0.60, 0.98, 0.60]), + 'PaleGreen': np.array([0.60, 0.98, 0.60]), + 'spring green': np.array([0.00, 1.00, 0.50]), + 'SpringGreen': np.array([0.00, 1.00, 0.50]), + 'lawn green': np.array([0.49, 0.99, 0.00]), + 'LawnGreen': np.array([0.49, 0.99, 0.00]), + 'green': np.array([0.00, 1.00, 0.00]), + 'chartreuse': np.array([0.50, 1.00, 0.00]), + 'medium spring green': np.array([0.00, 0.98, 0.60]), + 'MediumSpringGreen': np.array([0.00, 0.98, 0.60]), + 'green yellow': np.array([0.68, 1.00, 0.18]), + 'GreenYellow': np.array([0.68, 1.00, 0.18]), + 'lime green': np.array([0.20, 0.80, 0.20]), + 'LimeGreen': np.array([0.20, 0.80, 0.20]), + 'yellow green': np.array([0.60, 0.80, 0.20]), + 'YellowGreen': np.array([0.60, 0.80, 0.20]), + 'forest green': np.array([0.13, 0.55, 0.13]), + 'ForestGreen': np.array([0.13, 0.55, 0.13]), + 'olive drab': np.array([0.42, 0.56, 0.14]), + 'OliveDrab': np.array([0.42, 0.56, 0.14]), + 'dark khaki': np.array([0.74, 0.72, 0.42]), + 'DarkKhaki': np.array([0.74, 0.72, 0.42]), + 'khaki': np.array([0.94, 0.90, 0.55]), + 'pale goldenrod': 
np.array([0.93, 0.91, 0.67]), + 'PaleGoldenrod': np.array([0.93, 0.91, 0.67]), + 'light goldenrod yellow': np.array([0.98, 0.98, 0.82]), + 'LightGoldenrodYellow': np.array([0.98, 0.98, 0.82]), + 'light yellow': np.array([1.00, 1.00, 0.88]), + 'LightYellow': np.array([1.00, 1.00, 0.88]), + 'yellow': np.array([1.00, 1.00, 0.00]), + 'gold': np.array([1.00, 0.84, 0.00]), + 'light goldenrod': np.array([0.93, 0.87, 0.51]), + 'LightGoldenrod': np.array([0.93, 0.87, 0.51]), + 'goldenrod': np.array([0.85, 0.65, 0.13]), + 'dark goldenrod': np.array([0.72, 0.53, 0.04]), + 'DarkGoldenrod': np.array([0.72, 0.53, 0.04]), + 'rosy brown': np.array([0.74, 0.56, 0.56]), + 'RosyBrown': np.array([0.74, 0.56, 0.56]), + 'indian red': np.array([0.80, 0.36, 0.36]), + 'IndianRed': np.array([0.80, 0.36, 0.36]), + 'saddle brown': np.array([0.55, 0.27, 0.07]), + 'SaddleBrown': np.array([0.55, 0.27, 0.07]), + 'sienna': np.array([0.63, 0.32, 0.18]), + 'peru': np.array([0.80, 0.52, 0.25]), + 'burlywood': np.array([0.87, 0.72, 0.53]), + 'beige': np.array([0.96, 0.96, 0.86]), + 'wheat': np.array([0.96, 0.87, 0.70]), + 'sandy brown': np.array([0.96, 0.64, 0.38]), + 'SandyBrown': np.array([0.96, 0.64, 0.38]), + 'tan': np.array([0.82, 0.71, 0.55]), + 'chocolate': np.array([0.82, 0.41, 0.12]), + 'firebrick': np.array([0.70, 0.13, 0.13]), + 'brown': np.array([0.65, 0.16, 0.16]), + 'dark salmon': np.array([0.91, 0.59, 0.48]), + 'DarkSalmon': np.array([0.91, 0.59, 0.48]), + 'salmon': np.array([0.98, 0.50, 0.45]), + 'light salmon': np.array([1.00, 0.63, 0.48]), + 'LightSalmon': np.array([1.00, 0.63, 0.48]), + 'orange': np.array([1.00, 0.65, 0.00]), + 'dark orange': np.array([1.00, 0.55, 0.00]), + 'DarkOrange': np.array([1.00, 0.55, 0.00]), + 'coral': np.array([1.00, 0.50, 0.31]), + 'light coral': np.array([0.94, 0.50, 0.50]), + 'LightCoral': np.array([0.94, 0.50, 0.50]), + 'tomato': np.array([1.00, 0.39, 0.28]), + 'orange red': np.array([1.00, 0.27, 0.00]), + 'OrangeRed': np.array([1.00, 0.27, 0.00]), + 'red': np.array([1.00, 0.00, 0.00]), + 'hot pink': np.array([1.00, 0.41, 0.71]), + 'HotPink': np.array([1.00, 0.41, 0.71]), + 'deep pink': np.array([1.00, 0.08, 0.58]), + 'DeepPink': np.array([1.00, 0.08, 0.58]), + 'pink': np.array([1.00, 0.75, 0.80]), + 'light pink': np.array([1.00, 0.71, 0.76]), + 'LightPink': np.array([1.00, 0.71, 0.76]), + 'pale violet red': np.array([0.86, 0.44, 0.58]), + 'PaleVioletRed': np.array([0.86, 0.44, 0.58]), + 'maroon': np.array([0.69, 0.19, 0.38]), + 'medium violet red': np.array([0.78, 0.08, 0.52]), + 'MediumVioletRed': np.array([0.78, 0.08, 0.52]), + 'violet red': np.array([0.82, 0.13, 0.56]), + 'VioletRed': np.array([0.82, 0.13, 0.56]), + 'magenta': np.array([1.00, 0.00, 1.00]), + 'violet': np.array([0.93, 0.51, 0.93]), + 'plum': np.array([0.87, 0.63, 0.87]), + 'orchid': np.array([0.85, 0.44, 0.84]), + 'medium orchid': np.array([0.73, 0.33, 0.83]), + 'MediumOrchid': np.array([0.73, 0.33, 0.83]), + 'dark orchid': np.array([0.60, 0.20, 0.80]), + 'DarkOrchid': np.array([0.60, 0.20, 0.80]), + 'dark violet': np.array([0.58, 0.00, 0.83]), + 'DarkViolet': np.array([0.58, 0.00, 0.83]), + 'blue violet': np.array([0.54, 0.17, 0.89]), + 'BlueViolet': np.array([0.54, 0.17, 0.89]), + 'purple': np.array([0.63, 0.13, 0.94]), + 'medium purple': np.array([0.58, 0.44, 0.86]), + 'MediumPurple': np.array([0.58, 0.44, 0.86]), + 'thistle': np.array([0.85, 0.75, 0.85]), + 'snow1': np.array([1.00, 0.98, 0.98]), + 'snow2': np.array([0.93, 0.91, 0.91]), + 'snow3': np.array([0.80, 0.79, 0.79]), + 'snow4': np.array([0.55, 
0.54, 0.54]), + 'seashell1': np.array([1.00, 0.96, 0.93]), + 'seashell2': np.array([0.93, 0.90, 0.87]), + 'seashell3': np.array([0.80, 0.77, 0.75]), + 'seashell4': np.array([0.55, 0.53, 0.51]), + 'AntiqueWhite1': np.array([1.00, 0.94, 0.86]), + 'AntiqueWhite2': np.array([0.93, 0.87, 0.80]), + 'AntiqueWhite3': np.array([0.80, 0.75, 0.69]), + 'AntiqueWhite4': np.array([0.55, 0.51, 0.47]), + 'bisque1': np.array([1.00, 0.89, 0.77]), + 'bisque2': np.array([0.93, 0.84, 0.72]), + 'bisque3': np.array([0.80, 0.72, 0.62]), + 'bisque4': np.array([0.55, 0.49, 0.42]), + 'PeachPuff1': np.array([1.00, 0.85, 0.73]), + 'PeachPuff2': np.array([0.93, 0.80, 0.68]), + 'PeachPuff3': np.array([0.80, 0.69, 0.58]), + 'PeachPuff4': np.array([0.55, 0.47, 0.40]), + 'NavajoWhite1': np.array([1.00, 0.87, 0.68]), + 'NavajoWhite2': np.array([0.93, 0.81, 0.63]), + 'NavajoWhite3': np.array([0.80, 0.70, 0.55]), + 'NavajoWhite4': np.array([0.55, 0.47, 0.37]), + 'LemonChiffon1': np.array([1.00, 0.98, 0.80]), + 'LemonChiffon2': np.array([0.93, 0.91, 0.75]), + 'LemonChiffon3': np.array([0.80, 0.79, 0.65]), + 'LemonChiffon4': np.array([0.55, 0.54, 0.44]), + 'cornsilk1': np.array([1.00, 0.97, 0.86]), + 'cornsilk2': np.array([0.93, 0.91, 0.80]), + 'cornsilk3': np.array([0.80, 0.78, 0.69]), + 'cornsilk4': np.array([0.55, 0.53, 0.47]), + 'ivory1': np.array([1.00, 1.00, 0.94]), + 'ivory2': np.array([0.93, 0.93, 0.88]), + 'ivory3': np.array([0.80, 0.80, 0.76]), + 'ivory4': np.array([0.55, 0.55, 0.51]), + 'honeydew1': np.array([0.94, 1.00, 0.94]), + 'honeydew2': np.array([0.88, 0.93, 0.88]), + 'honeydew3': np.array([0.76, 0.80, 0.76]), + 'honeydew4': np.array([0.51, 0.55, 0.51]), + 'LavenderBlush1': np.array([1.00, 0.94, 0.96]), + 'LavenderBlush2': np.array([0.93, 0.88, 0.90]), + 'LavenderBlush3': np.array([0.80, 0.76, 0.77]), + 'LavenderBlush4': np.array([0.55, 0.51, 0.53]), + 'MistyRose1': np.array([1.00, 0.89, 0.88]), + 'MistyRose2': np.array([0.93, 0.84, 0.82]), + 'MistyRose3': np.array([0.80, 0.72, 0.71]), + 'MistyRose4': np.array([0.55, 0.49, 0.48]), + 'azure1': np.array([0.94, 1.00, 1.00]), + 'azure2': np.array([0.88, 0.93, 0.93]), + 'azure3': np.array([0.76, 0.80, 0.80]), + 'azure4': np.array([0.51, 0.55, 0.55]), + 'SlateBlue1': np.array([0.51, 0.44, 1.00]), + 'SlateBlue2': np.array([0.48, 0.40, 0.93]), + 'SlateBlue3': np.array([0.41, 0.35, 0.80]), + 'SlateBlue4': np.array([0.28, 0.24, 0.55]), + 'RoyalBlue1': np.array([0.28, 0.46, 1.00]), + 'RoyalBlue2': np.array([0.26, 0.43, 0.93]), + 'RoyalBlue3': np.array([0.23, 0.37, 0.80]), + 'RoyalBlue4': np.array([0.15, 0.25, 0.55]), + 'blue1': np.array([0.00, 0.00, 1.00]), + 'blue2': np.array([0.00, 0.00, 0.93]), + 'blue3': np.array([0.00, 0.00, 0.80]), + 'blue4': np.array([0.00, 0.00, 0.55]), + 'DodgerBlue1': np.array([0.12, 0.56, 1.00]), + 'DodgerBlue2': np.array([0.11, 0.53, 0.93]), + 'DodgerBlue3': np.array([0.09, 0.45, 0.80]), + 'DodgerBlue4': np.array([0.06, 0.31, 0.55]), + 'SteelBlue1': np.array([0.39, 0.72, 1.00]), + 'SteelBlue2': np.array([0.36, 0.67, 0.93]), + 'SteelBlue3': np.array([0.31, 0.58, 0.80]), + 'SteelBlue4': np.array([0.21, 0.39, 0.55]), + 'DeepSkyBlue1': np.array([0.00, 0.75, 1.00]), + 'DeepSkyBlue2': np.array([0.00, 0.70, 0.93]), + 'DeepSkyBlue3': np.array([0.00, 0.60, 0.80]), + 'DeepSkyBlue4': np.array([0.00, 0.41, 0.55]), + 'SkyBlue1': np.array([0.53, 0.81, 1.00]), + 'SkyBlue2': np.array([0.49, 0.75, 0.93]), + 'SkyBlue3': np.array([0.42, 0.65, 0.80]), + 'SkyBlue4': np.array([0.29, 0.44, 0.55]), + 'LightSkyBlue1': np.array([0.69, 0.89, 1.00]), + 'LightSkyBlue2': 
np.array([0.64, 0.83, 0.93]), + 'LightSkyBlue3': np.array([0.55, 0.71, 0.80]), + 'LightSkyBlue4': np.array([0.38, 0.48, 0.55]), + 'SlateGray1': np.array([0.78, 0.89, 1.00]), + 'SlateGray2': np.array([0.73, 0.83, 0.93]), + 'SlateGray3': np.array([0.62, 0.71, 0.80]), + 'SlateGray4': np.array([0.42, 0.48, 0.55]), + 'LightSteelBlue1': np.array([0.79, 0.88, 1.00]), + 'LightSteelBlue2': np.array([0.74, 0.82, 0.93]), + 'LightSteelBlue3': np.array([0.64, 0.71, 0.80]), + 'LightSteelBlue4': np.array([0.43, 0.48, 0.55]), + 'LightBlue1': np.array([0.75, 0.94, 1.00]), + 'LightBlue2': np.array([0.70, 0.87, 0.93]), + 'LightBlue3': np.array([0.60, 0.75, 0.80]), + 'LightBlue4': np.array([0.41, 0.51, 0.55]), + 'LightCyan1': np.array([0.88, 1.00, 1.00]), + 'LightCyan2': np.array([0.82, 0.93, 0.93]), + 'LightCyan3': np.array([0.71, 0.80, 0.80]), + 'LightCyan4': np.array([0.48, 0.55, 0.55]), + 'PaleTurquoise1': np.array([0.73, 1.00, 1.00]), + 'PaleTurquoise2': np.array([0.68, 0.93, 0.93]), + 'PaleTurquoise3': np.array([0.59, 0.80, 0.80]), + 'PaleTurquoise4': np.array([0.40, 0.55, 0.55]), + 'CadetBlue1': np.array([0.60, 0.96, 1.00]), + 'CadetBlue2': np.array([0.56, 0.90, 0.93]), + 'CadetBlue3': np.array([0.48, 0.77, 0.80]), + 'CadetBlue4': np.array([0.33, 0.53, 0.55]), + 'turquoise1': np.array([0.00, 0.96, 1.00]), + 'turquoise2': np.array([0.00, 0.90, 0.93]), + 'turquoise3': np.array([0.00, 0.77, 0.80]), + 'turquoise4': np.array([0.00, 0.53, 0.55]), + 'cyan1': np.array([0.00, 1.00, 1.00]), + 'cyan2': np.array([0.00, 0.93, 0.93]), + 'cyan3': np.array([0.00, 0.80, 0.80]), + 'cyan4': np.array([0.00, 0.55, 0.55]), + 'DarkSlateGray1': np.array([0.59, 1.00, 1.00]), + 'DarkSlateGray2': np.array([0.55, 0.93, 0.93]), + 'DarkSlateGray3': np.array([0.47, 0.80, 0.80]), + 'DarkSlateGray4': np.array([0.32, 0.55, 0.55]), + 'aquamarine1': np.array([0.50, 1.00, 0.83]), + 'aquamarine2': np.array([0.46, 0.93, 0.78]), + 'aquamarine3': np.array([0.40, 0.80, 0.67]), + 'aquamarine4': np.array([0.27, 0.55, 0.45]), + 'DarkSeaGreen1': np.array([0.76, 1.00, 0.76]), + 'DarkSeaGreen2': np.array([0.71, 0.93, 0.71]), + 'DarkSeaGreen3': np.array([0.61, 0.80, 0.61]), + 'DarkSeaGreen4': np.array([0.41, 0.55, 0.41]), + 'SeaGreen1': np.array([0.33, 1.00, 0.62]), + 'SeaGreen2': np.array([0.31, 0.93, 0.58]), + 'SeaGreen3': np.array([0.26, 0.80, 0.50]), + 'SeaGreen4': np.array([0.18, 0.55, 0.34]), + 'PaleGreen1': np.array([0.60, 1.00, 0.60]), + 'PaleGreen2': np.array([0.56, 0.93, 0.56]), + 'PaleGreen3': np.array([0.49, 0.80, 0.49]), + 'PaleGreen4': np.array([0.33, 0.55, 0.33]), + 'SpringGreen1': np.array([0.00, 1.00, 0.50]), + 'SpringGreen2': np.array([0.00, 0.93, 0.46]), + 'SpringGreen3': np.array([0.00, 0.80, 0.40]), + 'SpringGreen4': np.array([0.00, 0.55, 0.27]), + 'green1': np.array([0.00, 1.00, 0.00]), + 'green2': np.array([0.00, 0.93, 0.00]), + 'green3': np.array([0.00, 0.80, 0.00]), + 'green4': np.array([0.00, 0.55, 0.00]), + 'chartreuse1': np.array([0.50, 1.00, 0.00]), + 'chartreuse2': np.array([0.46, 0.93, 0.00]), + 'chartreuse3': np.array([0.40, 0.80, 0.00]), + 'chartreuse4': np.array([0.27, 0.55, 0.00]), + 'OliveDrab1': np.array([0.75, 1.00, 0.24]), + 'OliveDrab2': np.array([0.70, 0.93, 0.23]), + 'OliveDrab3': np.array([0.60, 0.80, 0.20]), + 'OliveDrab4': np.array([0.41, 0.55, 0.13]), + 'DarkOliveGreen1': np.array([0.79, 1.00, 0.44]), + 'DarkOliveGreen2': np.array([0.74, 0.93, 0.41]), + 'DarkOliveGreen3': np.array([0.64, 0.80, 0.35]), + 'DarkOliveGreen4': np.array([0.43, 0.55, 0.24]), + 'khaki1': np.array([1.00, 0.96, 0.56]), + 'khaki2': 
np.array([0.93, 0.90, 0.52]), + 'khaki3': np.array([0.80, 0.78, 0.45]), + 'khaki4': np.array([0.55, 0.53, 0.31]), + 'LightGoldenrod1': np.array([1.00, 0.93, 0.55]), + 'LightGoldenrod2': np.array([0.93, 0.86, 0.51]), + 'LightGoldenrod3': np.array([0.80, 0.75, 0.44]), + 'LightGoldenrod4': np.array([0.55, 0.51, 0.30]), + 'LightYellow1': np.array([1.00, 1.00, 0.88]), + 'LightYellow2': np.array([0.93, 0.93, 0.82]), + 'LightYellow3': np.array([0.80, 0.80, 0.71]), + 'LightYellow4': np.array([0.55, 0.55, 0.48]), + 'yellow1': np.array([1.00, 1.00, 0.00]), + 'yellow2': np.array([0.93, 0.93, 0.00]), + 'yellow3': np.array([0.80, 0.80, 0.00]), + 'yellow4': np.array([0.55, 0.55, 0.00]), + 'gold1': np.array([1.00, 0.84, 0.00]), + 'gold2': np.array([0.93, 0.79, 0.00]), + 'gold3': np.array([0.80, 0.68, 0.00]), + 'gold4': np.array([0.55, 0.46, 0.00]), + 'goldenrod1': np.array([1.00, 0.76, 0.15]), + 'goldenrod2': np.array([0.93, 0.71, 0.13]), + 'goldenrod3': np.array([0.80, 0.61, 0.11]), + 'goldenrod4': np.array([0.55, 0.41, 0.08]), + 'DarkGoldenrod1': np.array([1.00, 0.73, 0.06]), + 'DarkGoldenrod2': np.array([0.93, 0.68, 0.05]), + 'DarkGoldenrod3': np.array([0.80, 0.58, 0.05]), + 'DarkGoldenrod4': np.array([0.55, 0.40, 0.03]), + 'RosyBrown1': np.array([1.00, 0.76, 0.76]), + 'RosyBrown2': np.array([0.93, 0.71, 0.71]), + 'RosyBrown3': np.array([0.80, 0.61, 0.61]), + 'RosyBrown4': np.array([0.55, 0.41, 0.41]), + 'IndianRed1': np.array([1.00, 0.42, 0.42]), + 'IndianRed2': np.array([0.93, 0.39, 0.39]), + 'IndianRed3': np.array([0.80, 0.33, 0.33]), + 'IndianRed4': np.array([0.55, 0.23, 0.23]), + 'sienna1': np.array([1.00, 0.51, 0.28]), + 'sienna2': np.array([0.93, 0.47, 0.26]), + 'sienna3': np.array([0.80, 0.41, 0.22]), + 'sienna4': np.array([0.55, 0.28, 0.15]), + 'burlywood1': np.array([1.00, 0.83, 0.61]), + 'burlywood2': np.array([0.93, 0.77, 0.57]), + 'burlywood3': np.array([0.80, 0.67, 0.49]), + 'burlywood4': np.array([0.55, 0.45, 0.33]), + 'wheat1': np.array([1.00, 0.91, 0.73]), + 'wheat2': np.array([0.93, 0.85, 0.68]), + 'wheat3': np.array([0.80, 0.73, 0.59]), + 'wheat4': np.array([0.55, 0.49, 0.40]), + 'tan1': np.array([1.00, 0.65, 0.31]), + 'tan2': np.array([0.93, 0.60, 0.29]), + 'tan3': np.array([0.80, 0.52, 0.25]), + 'tan4': np.array([0.55, 0.35, 0.17]), + 'chocolate1': np.array([1.00, 0.50, 0.14]), + 'chocolate2': np.array([0.93, 0.46, 0.13]), + 'chocolate3': np.array([0.80, 0.40, 0.11]), + 'chocolate4': np.array([0.55, 0.27, 0.07]), + 'firebrick1': np.array([1.00, 0.19, 0.19]), + 'firebrick2': np.array([0.93, 0.17, 0.17]), + 'firebrick3': np.array([0.80, 0.15, 0.15]), + 'firebrick4': np.array([0.55, 0.10, 0.10]), + 'brown1': np.array([1.00, 0.25, 0.25]), + 'brown2': np.array([0.93, 0.23, 0.23]), + 'brown3': np.array([0.80, 0.20, 0.20]), + 'brown4': np.array([0.55, 0.14, 0.14]), + 'salmon1': np.array([1.00, 0.55, 0.41]), + 'salmon2': np.array([0.93, 0.51, 0.38]), + 'salmon3': np.array([0.80, 0.44, 0.33]), + 'salmon4': np.array([0.55, 0.30, 0.22]), + 'LightSalmon1': np.array([1.00, 0.63, 0.48]), + 'LightSalmon2': np.array([0.93, 0.58, 0.45]), + 'LightSalmon3': np.array([0.80, 0.51, 0.38]), + 'LightSalmon4': np.array([0.55, 0.34, 0.26]), + 'orange1': np.array([1.00, 0.65, 0.00]), + 'orange2': np.array([0.93, 0.60, 0.00]), + 'orange3': np.array([0.80, 0.52, 0.00]), + 'orange4': np.array([0.55, 0.35, 0.00]), + 'DarkOrange1': np.array([1.00, 0.50, 0.00]), + 'DarkOrange2': np.array([0.93, 0.46, 0.00]), + 'DarkOrange3': np.array([0.80, 0.40, 0.00]), + 'DarkOrange4': np.array([0.55, 0.27, 0.00]), + 'coral1': 
np.array([1.00, 0.45, 0.34]), + 'coral2': np.array([0.93, 0.42, 0.31]), + 'coral3': np.array([0.80, 0.36, 0.27]), + 'coral4': np.array([0.55, 0.24, 0.18]), + 'tomato1': np.array([1.00, 0.39, 0.28]), + 'tomato2': np.array([0.93, 0.36, 0.26]), + 'tomato3': np.array([0.80, 0.31, 0.22]), + 'tomato4': np.array([0.55, 0.21, 0.15]), + 'OrangeRed1': np.array([1.00, 0.27, 0.00]), + 'OrangeRed2': np.array([0.93, 0.25, 0.00]), + 'OrangeRed3': np.array([0.80, 0.22, 0.00]), + 'OrangeRed4': np.array([0.55, 0.15, 0.00]), + 'red1': np.array([1.00, 0.00, 0.00]), + 'red2': np.array([0.93, 0.00, 0.00]), + 'red3': np.array([0.80, 0.00, 0.00]), + 'red4': np.array([0.55, 0.00, 0.00]), + 'DeepPink1': np.array([1.00, 0.08, 0.58]), + 'DeepPink2': np.array([0.93, 0.07, 0.54]), + 'DeepPink3': np.array([0.80, 0.06, 0.46]), + 'DeepPink4': np.array([0.55, 0.04, 0.31]), + 'HotPink1': np.array([1.00, 0.43, 0.71]), + 'HotPink2': np.array([0.93, 0.42, 0.65]), + 'HotPink3': np.array([0.80, 0.38, 0.56]), + 'HotPink4': np.array([0.55, 0.23, 0.38]), + 'pink1': np.array([1.00, 0.71, 0.77]), + 'pink2': np.array([0.93, 0.66, 0.72]), + 'pink3': np.array([0.80, 0.57, 0.62]), + 'pink4': np.array([0.55, 0.39, 0.42]), + 'LightPink1': np.array([1.00, 0.68, 0.73]), + 'LightPink2': np.array([0.93, 0.64, 0.68]), + 'LightPink3': np.array([0.80, 0.55, 0.58]), + 'LightPink4': np.array([0.55, 0.37, 0.40]), + 'PaleVioletRed1': np.array([1.00, 0.51, 0.67]), + 'PaleVioletRed2': np.array([0.93, 0.47, 0.62]), + 'PaleVioletRed3': np.array([0.80, 0.41, 0.54]), + 'PaleVioletRed4': np.array([0.55, 0.28, 0.36]), + 'maroon1': np.array([1.00, 0.20, 0.70]), + 'maroon2': np.array([0.93, 0.19, 0.65]), + 'maroon3': np.array([0.80, 0.16, 0.56]), + 'maroon4': np.array([0.55, 0.11, 0.38]), + 'VioletRed1': np.array([1.00, 0.24, 0.59]), + 'VioletRed2': np.array([0.93, 0.23, 0.55]), + 'VioletRed3': np.array([0.80, 0.20, 0.47]), + 'VioletRed4': np.array([0.55, 0.13, 0.32]), + 'magenta1': np.array([1.00, 0.00, 1.00]), + 'magenta2': np.array([0.93, 0.00, 0.93]), + 'magenta3': np.array([0.80, 0.00, 0.80]), + 'magenta4': np.array([0.55, 0.00, 0.55]), + 'orchid1': np.array([1.00, 0.51, 0.98]), + 'orchid2': np.array([0.93, 0.48, 0.91]), + 'orchid3': np.array([0.80, 0.41, 0.79]), + 'orchid4': np.array([0.55, 0.28, 0.54]), + 'plum1': np.array([1.00, 0.73, 1.00]), + 'plum2': np.array([0.93, 0.68, 0.93]), + 'plum3': np.array([0.80, 0.59, 0.80]), + 'plum4': np.array([0.55, 0.40, 0.55]), + 'MediumOrchid1': np.array([0.88, 0.40, 1.00]), + 'MediumOrchid2': np.array([0.82, 0.37, 0.93]), + 'MediumOrchid3': np.array([0.71, 0.32, 0.80]), + 'MediumOrchid4': np.array([0.48, 0.22, 0.55]), + 'DarkOrchid1': np.array([0.75, 0.24, 1.00]), + 'DarkOrchid2': np.array([0.70, 0.23, 0.93]), + 'DarkOrchid3': np.array([0.60, 0.20, 0.80]), + 'DarkOrchid4': np.array([0.41, 0.13, 0.55]), + 'purple1': np.array([0.61, 0.19, 1.00]), + 'purple2': np.array([0.57, 0.17, 0.93]), + 'purple3': np.array([0.49, 0.15, 0.80]), + 'purple4': np.array([0.33, 0.10, 0.55]), + 'MediumPurple1': np.array([0.67, 0.51, 1.00]), + 'MediumPurple2': np.array([0.62, 0.47, 0.93]), + 'MediumPurple3': np.array([0.54, 0.41, 0.80]), + 'MediumPurple4': np.array([0.36, 0.28, 0.55]), + 'thistle1': np.array([1.00, 0.88, 1.00]), + 'thistle2': np.array([0.93, 0.82, 0.93]), + 'thistle3': np.array([0.80, 0.71, 0.80]), + 'thistle4': np.array([0.55, 0.48, 0.55]), + 'gray0': np.array([0.00, 0.00, 0.00]), + 'grey0': np.array([0.00, 0.00, 0.00]), + 'gray1': np.array([0.01, 0.01, 0.01]), + 'grey1': np.array([0.01, 0.01, 0.01]), + 'gray2': 
np.array([0.02, 0.02, 0.02]), + 'grey2': np.array([0.02, 0.02, 0.02]), + 'gray3': np.array([0.03, 0.03, 0.03]), + 'grey3': np.array([0.03, 0.03, 0.03]), + 'gray4': np.array([0.04, 0.04, 0.04]), + 'grey4': np.array([0.04, 0.04, 0.04]), + 'gray5': np.array([0.05, 0.05, 0.05]), + 'grey5': np.array([0.05, 0.05, 0.05]), + 'gray6': np.array([0.06, 0.06, 0.06]), + 'grey6': np.array([0.06, 0.06, 0.06]), + 'gray7': np.array([0.07, 0.07, 0.07]), + 'grey7': np.array([0.07, 0.07, 0.07]), + 'gray8': np.array([0.08, 0.08, 0.08]), + 'grey8': np.array([0.08, 0.08, 0.08]), + 'gray9': np.array([0.09, 0.09, 0.09]), + 'grey9': np.array([0.09, 0.09, 0.09]), + 'gray10': np.array([0.10, 0.10, 0.10]), + 'grey10': np.array([0.10, 0.10, 0.10]), + 'gray11': np.array([0.11, 0.11, 0.11]), + 'grey11': np.array([0.11, 0.11, 0.11]), + 'gray12': np.array([0.12, 0.12, 0.12]), + 'grey12': np.array([0.12, 0.12, 0.12]), + 'gray13': np.array([0.13, 0.13, 0.13]), + 'grey13': np.array([0.13, 0.13, 0.13]), + 'gray14': np.array([0.14, 0.14, 0.14]), + 'grey14': np.array([0.14, 0.14, 0.14]), + 'gray15': np.array([0.15, 0.15, 0.15]), + 'grey15': np.array([0.15, 0.15, 0.15]), + 'gray16': np.array([0.16, 0.16, 0.16]), + 'grey16': np.array([0.16, 0.16, 0.16]), + 'gray17': np.array([0.17, 0.17, 0.17]), + 'grey17': np.array([0.17, 0.17, 0.17]), + 'gray18': np.array([0.18, 0.18, 0.18]), + 'grey18': np.array([0.18, 0.18, 0.18]), + 'gray19': np.array([0.19, 0.19, 0.19]), + 'grey19': np.array([0.19, 0.19, 0.19]), + 'gray20': np.array([0.20, 0.20, 0.20]), + 'grey20': np.array([0.20, 0.20, 0.20]), + 'gray21': np.array([0.21, 0.21, 0.21]), + 'grey21': np.array([0.21, 0.21, 0.21]), + 'gray22': np.array([0.22, 0.22, 0.22]), + 'grey22': np.array([0.22, 0.22, 0.22]), + 'gray23': np.array([0.23, 0.23, 0.23]), + 'grey23': np.array([0.23, 0.23, 0.23]), + 'gray24': np.array([0.24, 0.24, 0.24]), + 'grey24': np.array([0.24, 0.24, 0.24]), + 'gray25': np.array([0.25, 0.25, 0.25]), + 'grey25': np.array([0.25, 0.25, 0.25]), + 'gray26': np.array([0.26, 0.26, 0.26]), + 'grey26': np.array([0.26, 0.26, 0.26]), + 'gray27': np.array([0.27, 0.27, 0.27]), + 'grey27': np.array([0.27, 0.27, 0.27]), + 'gray28': np.array([0.28, 0.28, 0.28]), + 'grey28': np.array([0.28, 0.28, 0.28]), + 'gray29': np.array([0.29, 0.29, 0.29]), + 'grey29': np.array([0.29, 0.29, 0.29]), + 'gray30': np.array([0.30, 0.30, 0.30]), + 'grey30': np.array([0.30, 0.30, 0.30]), + 'gray31': np.array([0.31, 0.31, 0.31]), + 'grey31': np.array([0.31, 0.31, 0.31]), + 'gray32': np.array([0.32, 0.32, 0.32]), + 'grey32': np.array([0.32, 0.32, 0.32]), + 'gray33': np.array([0.33, 0.33, 0.33]), + 'grey33': np.array([0.33, 0.33, 0.33]), + 'gray34': np.array([0.34, 0.34, 0.34]), + 'grey34': np.array([0.34, 0.34, 0.34]), + 'gray35': np.array([0.35, 0.35, 0.35]), + 'grey35': np.array([0.35, 0.35, 0.35]), + 'gray36': np.array([0.36, 0.36, 0.36]), + 'grey36': np.array([0.36, 0.36, 0.36]), + 'gray37': np.array([0.37, 0.37, 0.37]), + 'grey37': np.array([0.37, 0.37, 0.37]), + 'gray38': np.array([0.38, 0.38, 0.38]), + 'grey38': np.array([0.38, 0.38, 0.38]), + 'gray39': np.array([0.39, 0.39, 0.39]), + 'grey39': np.array([0.39, 0.39, 0.39]), + 'gray40': np.array([0.40, 0.40, 0.40]), + 'grey40': np.array([0.40, 0.40, 0.40]), + 'gray41': np.array([0.41, 0.41, 0.41]), + 'grey41': np.array([0.41, 0.41, 0.41]), + 'gray42': np.array([0.42, 0.42, 0.42]), + 'grey42': np.array([0.42, 0.42, 0.42]), + 'gray43': np.array([0.43, 0.43, 0.43]), + 'grey43': np.array([0.43, 0.43, 0.43]), + 'gray44': np.array([0.44, 0.44, 0.44]), + 'grey44': 
np.array([0.44, 0.44, 0.44]), + 'gray45': np.array([0.45, 0.45, 0.45]), + 'grey45': np.array([0.45, 0.45, 0.45]), + 'gray46': np.array([0.46, 0.46, 0.46]), + 'grey46': np.array([0.46, 0.46, 0.46]), + 'gray47': np.array([0.47, 0.47, 0.47]), + 'grey47': np.array([0.47, 0.47, 0.47]), + 'gray48': np.array([0.48, 0.48, 0.48]), + 'grey48': np.array([0.48, 0.48, 0.48]), + 'gray49': np.array([0.49, 0.49, 0.49]), + 'grey49': np.array([0.49, 0.49, 0.49]), + 'gray50': np.array([0.50, 0.50, 0.50]), + 'grey50': np.array([0.50, 0.50, 0.50]), + 'gray51': np.array([0.51, 0.51, 0.51]), + 'grey51': np.array([0.51, 0.51, 0.51]), + 'gray52': np.array([0.52, 0.52, 0.52]), + 'grey52': np.array([0.52, 0.52, 0.52]), + 'gray53': np.array([0.53, 0.53, 0.53]), + 'grey53': np.array([0.53, 0.53, 0.53]), + 'gray54': np.array([0.54, 0.54, 0.54]), + 'grey54': np.array([0.54, 0.54, 0.54]), + 'gray55': np.array([0.55, 0.55, 0.55]), + 'grey55': np.array([0.55, 0.55, 0.55]), + 'gray56': np.array([0.56, 0.56, 0.56]), + 'grey56': np.array([0.56, 0.56, 0.56]), + 'gray57': np.array([0.57, 0.57, 0.57]), + 'grey57': np.array([0.57, 0.57, 0.57]), + 'gray58': np.array([0.58, 0.58, 0.58]), + 'grey58': np.array([0.58, 0.58, 0.58]), + 'gray59': np.array([0.59, 0.59, 0.59]), + 'grey59': np.array([0.59, 0.59, 0.59]), + 'gray60': np.array([0.60, 0.60, 0.60]), + 'grey60': np.array([0.60, 0.60, 0.60]), + 'gray61': np.array([0.61, 0.61, 0.61]), + 'grey61': np.array([0.61, 0.61, 0.61]), + 'gray62': np.array([0.62, 0.62, 0.62]), + 'grey62': np.array([0.62, 0.62, 0.62]), + 'gray63': np.array([0.63, 0.63, 0.63]), + 'grey63': np.array([0.63, 0.63, 0.63]), + 'gray64': np.array([0.64, 0.64, 0.64]), + 'grey64': np.array([0.64, 0.64, 0.64]), + 'gray65': np.array([0.65, 0.65, 0.65]), + 'grey65': np.array([0.65, 0.65, 0.65]), + 'gray66': np.array([0.66, 0.66, 0.66]), + 'grey66': np.array([0.66, 0.66, 0.66]), + 'gray67': np.array([0.67, 0.67, 0.67]), + 'grey67': np.array([0.67, 0.67, 0.67]), + 'gray68': np.array([0.68, 0.68, 0.68]), + 'grey68': np.array([0.68, 0.68, 0.68]), + 'gray69': np.array([0.69, 0.69, 0.69]), + 'grey69': np.array([0.69, 0.69, 0.69]), + 'gray70': np.array([0.70, 0.70, 0.70]), + 'grey70': np.array([0.70, 0.70, 0.70]), + 'gray71': np.array([0.71, 0.71, 0.71]), + 'grey71': np.array([0.71, 0.71, 0.71]), + 'gray72': np.array([0.72, 0.72, 0.72]), + 'grey72': np.array([0.72, 0.72, 0.72]), + 'gray73': np.array([0.73, 0.73, 0.73]), + 'grey73': np.array([0.73, 0.73, 0.73]), + 'gray74': np.array([0.74, 0.74, 0.74]), + 'grey74': np.array([0.74, 0.74, 0.74]), + 'gray75': np.array([0.75, 0.75, 0.75]), + 'grey75': np.array([0.75, 0.75, 0.75]), + 'gray76': np.array([0.76, 0.76, 0.76]), + 'grey76': np.array([0.76, 0.76, 0.76]), + 'gray77': np.array([0.77, 0.77, 0.77]), + 'grey77': np.array([0.77, 0.77, 0.77]), + 'gray78': np.array([0.78, 0.78, 0.78]), + 'grey78': np.array([0.78, 0.78, 0.78]), + 'gray79': np.array([0.79, 0.79, 0.79]), + 'grey79': np.array([0.79, 0.79, 0.79]), + 'gray80': np.array([0.80, 0.80, 0.80]), + 'grey80': np.array([0.80, 0.80, 0.80]), + 'gray81': np.array([0.81, 0.81, 0.81]), + 'grey81': np.array([0.81, 0.81, 0.81]), + 'gray82': np.array([0.82, 0.82, 0.82]), + 'grey82': np.array([0.82, 0.82, 0.82]), + 'gray83': np.array([0.83, 0.83, 0.83]), + 'grey83': np.array([0.83, 0.83, 0.83]), + 'gray84': np.array([0.84, 0.84, 0.84]), + 'grey84': np.array([0.84, 0.84, 0.84]), + 'gray85': np.array([0.85, 0.85, 0.85]), + 'grey85': np.array([0.85, 0.85, 0.85]), + 'gray86': np.array([0.86, 0.86, 0.86]), + 'grey86': np.array([0.86, 0.86, 
0.86]), + 'gray87': np.array([0.87, 0.87, 0.87]), + 'grey87': np.array([0.87, 0.87, 0.87]), + 'gray88': np.array([0.88, 0.88, 0.88]), + 'grey88': np.array([0.88, 0.88, 0.88]), + 'gray89': np.array([0.89, 0.89, 0.89]), + 'grey89': np.array([0.89, 0.89, 0.89]), + 'gray90': np.array([0.90, 0.90, 0.90]), + 'grey90': np.array([0.90, 0.90, 0.90]), + 'gray91': np.array([0.91, 0.91, 0.91]), + 'grey91': np.array([0.91, 0.91, 0.91]), + 'gray92': np.array([0.92, 0.92, 0.92]), + 'grey92': np.array([0.92, 0.92, 0.92]), + 'gray93': np.array([0.93, 0.93, 0.93]), + 'grey93': np.array([0.93, 0.93, 0.93]), + 'gray94': np.array([0.94, 0.94, 0.94]), + 'grey94': np.array([0.94, 0.94, 0.94]), + 'gray95': np.array([0.95, 0.95, 0.95]), + 'grey95': np.array([0.95, 0.95, 0.95]), + 'gray96': np.array([0.96, 0.96, 0.96]), + 'grey96': np.array([0.96, 0.96, 0.96]), + 'gray97': np.array([0.97, 0.97, 0.97]), + 'grey97': np.array([0.97, 0.97, 0.97]), + 'gray98': np.array([0.98, 0.98, 0.98]), + 'grey98': np.array([0.98, 0.98, 0.98]), + 'gray99': np.array([0.99, 0.99, 0.99]), + 'grey99': np.array([0.99, 0.99, 0.99]), + 'gray100': np.array([1.00, 1.00, 1.00]), + 'grey100': np.array([1.00, 1.00, 1.00]), + 'dark grey': np.array([0.66, 0.66, 0.66]), + 'DarkGrey': np.array([0.66, 0.66, 0.66]), + 'dark gray': np.array([0.66, 0.66, 0.66]), + 'DarkGray': np.array([0.66, 0.66, 0.66]), + 'dark blue': np.array([0.00, 0.00, 0.55]), + 'DarkBlue': np.array([0.00, 0.00, 0.55]), + 'dark cyan': np.array([0.00, 0.55, 0.55]), + 'DarkCyan': np.array([0.00, 0.55, 0.55]), + 'dark magenta': np.array([0.55, 0.00, 0.55]), + 'DarkMagenta': np.array([0.55, 0.00, 0.55]), + 'dark red': np.array([0.55, 0.00, 0.00]), + 'DarkRed': np.array([0.55, 0.00, 0.00]), + 'light green': np.array([0.56, 0.93, 0.56]), + 'LightGreen': np.array([0.56, 0.93, 0.56]) +} + + +if __name__ == '__main__': + main() diff --git a/mesh-master/mesh/errors.py b/mesh-master/mesh/errors.py new file mode 100644 index 0000000000000000000000000000000000000000..69746f2c28168d4a06aada4f4478f539ede82086 --- /dev/null +++ b/mesh-master/mesh/errors.py @@ -0,0 +1,15 @@ +# Copyright (c) 2017 Max Planck Society. All rights reserved. + +""" +Error heirarchy for Mesh class +""" + + +class MeshError(Exception): + """Base error class for Mesh-related errors""" + pass + + +class SerializationError(MeshError): + """Mesh reading or writing errors""" + pass diff --git a/mesh-master/mesh/fonts.py b/mesh-master/mesh/fonts.py new file mode 100644 index 0000000000000000000000000000000000000000..1d8add55c74886dd3ea702d978574c03f7463451 --- /dev/null +++ b/mesh-master/mesh/fonts.py @@ -0,0 +1,87 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Copyright (c) 2013 Max Planck Society. All rights reserved. 
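For reference before the fonts module continues: the name_to_rgb table that colors.py defines above maps color names to float RGB triples in [0, 1], and the 'grayN'/'greyN' spellings are interchangeable aliases. A minimal lookup sketch follows; the psbody.mesh import path is an assumption about how this vendored mesh-master copy is installed:

    # Hedged sketch: resolving color names from the table added in colors.py above.
    import numpy as np
    from psbody.mesh.colors import name_to_rgb  # assumed install path for the vendored package

    dark_red = name_to_rgb['DarkRed']                     # array([0.55, 0.  , 0.  ])
    mid_gray = name_to_rgb['gray50']                      # array([0.5, 0.5, 0.5])
    assert np.allclose(mid_gray, name_to_rgb['grey50'])   # gray/grey aliases agree

This is the same table that lines.py and mesh.py consult when a color is given by name (see their colors_like methods later in this patch).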
+ +import os +import numpy as np +from OpenGL.GL import glPixelStorei, \ + glGenTextures, \ + glBindTexture, \ + glGenerateMipmap, \ + glHint, \ + glTexImage2D +from OpenGL.GL import GL_UNPACK_ALIGNMENT, \ + GL_TEXTURE_2D, \ + GL_RGB, \ + GL_BGR, \ + GL_GENERATE_MIPMAP_HINT, \ + GL_NICEST, \ + GL_UNSIGNED_BYTE + + +def get_image_with_text(text, fgcolor, bgcolor): + if not hasattr(get_image_with_text, 'cache'): + get_image_with_text.cache = {} + + import zlib + uid = str(zlib.crc32(text)) + str(zlib.crc32(np.array(fgcolor))) + str(zlib.crc32(np.array(bgcolor))) + if uid not in get_image_with_text.cache: + from PIL import ImageFont + from PIL import Image + from PIL import ImageDraw + + font = ImageFont.truetype("/Library/Fonts/Courier New.ttf", 30) + + imsize = (256, 256) + + bgarray = np.asarray(np.zeros((imsize[0], imsize[1], 3)), np.uint8) + bgarray[:, :, 0] += bgcolor[0] + bgarray[:, :, 1] += bgcolor[1] + bgarray[:, :, 2] += bgcolor[2] + img = Image.fromarray(bgarray) + draw = ImageDraw.Draw(img) + w, h = draw.textsize(text, font=font) + text_pos = ((imsize[0] - w) / 2, (imsize[1] - h) / 2) + draw.text(text_pos, text, fill=fgcolor, font=font) + get_image_with_text.cache[uid] = np.array(img.getdata()).reshape(img.size[0], img.size[1], 3) * 255 + return get_image_with_text.cache[uid] + + +def get_textureid_with_text(text, fgcolor, bgcolor): + if not hasattr(get_textureid_with_text, 'cache'): + get_textureid_with_text.cache = {} + + import zlib + uid = str(zlib.crc32(text)) + str(zlib.crc32(np.array(fgcolor))) + str(zlib.crc32(np.array(bgcolor))) + if uid not in get_textureid_with_text.cache: + from PIL import ImageFont + from PIL import Image + from PIL import ImageDraw + + font = ImageFont.truetype(os.path.join(os.path.dirname(__file__), + "ressources", + "Arial.ttf"), + 100) + + imsize = (128, 128) + + bgarray = np.asarray(np.zeros((imsize[0], imsize[1], 3)), np.uint8) + bgarray[:, :, 0] += bgcolor[0] + bgarray[:, :, 1] += bgcolor[1] + bgarray[:, :, 2] += bgcolor[2] + img = Image.fromarray(bgarray) + draw = ImageDraw.Draw(img) + w, h = draw.textsize(text, font=font) + text_pos = ((imsize[0] - w) / 2, (imsize[1] - h) / 2) + draw.text(text_pos, text, fill=tuple(np.asarray(fgcolor, np.uint8)), font=font) + texture_data = np.asarray(np.array(img.getdata()).reshape(img.size[0], img.size[1], 3) * 255, np.uint8) + + textureID = glGenTextures(1) + glPixelStorei(GL_UNPACK_ALIGNMENT, 1) + glBindTexture(GL_TEXTURE_2D, textureID) + glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, texture_data.shape[1], texture_data.shape[0], 0, GL_BGR, GL_UNSIGNED_BYTE, texture_data.flatten()) + glHint(GL_GENERATE_MIPMAP_HINT, GL_NICEST) # must be GL_FASTEST, GL_NICEST or GL_DONT_CARE + glGenerateMipmap(GL_TEXTURE_2D) + get_textureid_with_text.cache[uid] = textureID + + return get_textureid_with_text.cache[uid] diff --git a/mesh-master/mesh/geometry/__init__.py b/mesh-master/mesh/geometry/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..52544ca0073c7ac8e827b343e4911fd197faf38b --- /dev/null +++ b/mesh-master/mesh/geometry/__init__.py @@ -0,0 +1,4 @@ +#!/usr/bin/env python +# encoding: utf-8 + +# Copyright (c) 2016 Max Planck Society. All rights reserved. 
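A note on the text-to-texture helpers in fonts.py above: both memoize on a key built from CRC32 checksums of the text and the two colors. Under Python 3, zlib.crc32 expects a bytes-like object, so a plain str would need to be encoded first; the sketch below shows the key construction under that assumption (text_cache_key is a hypothetical helper for illustration, not part of the library):

    # Hedged sketch of the cache key scheme used by get_image_with_text / get_textureid_with_text.
    import zlib
    import numpy as np

    def text_cache_key(text, fgcolor, bgcolor):
        data = text.encode('utf-8') if isinstance(text, str) else text  # crc32 needs bytes in Python 3
        return (str(zlib.crc32(data))
                + str(zlib.crc32(np.array(fgcolor)))   # numpy arrays expose the buffer protocol
                + str(zlib.crc32(np.array(bgcolor))))

    key = text_cache_key("hello", (255, 255, 255), (0, 0, 0))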
diff --git a/mesh-master/mesh/geometry/barycentric_coordinates_of_projection.py b/mesh-master/mesh/geometry/barycentric_coordinates_of_projection.py new file mode 100644 index 0000000000000000000000000000000000000000..8a6aad657a5a88ec9fb6f3d15a9ef69564c4094f --- /dev/null +++ b/mesh-master/mesh/geometry/barycentric_coordinates_of_projection.py @@ -0,0 +1,49 @@ +#!/usr/bin/env python +# encoding: utf-8 + +# Copyright (c) 2013 Max Planck Society. All rights reserved. + +from numpy import cross, sum, isscalar, spacing, vstack + + +def barycentric_coordinates_of_projection(p, q, u, v): + """Given a point, gives projected coords of that point to a triangle + in barycentric coordinates. + + See + + **Heidrich**, Computing the Barycentric Coordinates of a Projected Point, JGT 05 + at http://www.cs.ubc.ca/~heidrich/Papers/JGT.05.pdf + + :param p: point to project + :param q: a vertex of the triangle to project into + :param u,v: edges of the the triangle such that it has vertices ``q``, ``q+u``, ``q+v`` + + :returns: barycentric coordinates of ``p``'s projection in triangle defined by ``q``, ``u``, ``v`` + vectorized so ``p``, ``q``, ``u``, ``v`` can all be ``3xN`` + """ + + p = p.T + q = q.T + u = u.T + v = v.T + + n = cross(u, v, axis=0) + s = sum(n * n, axis=0) + + # If the triangle edges are collinear, cross-product is zero, + # which makes "s" 0, which gives us divide by zero. So we + # make the arbitrary choice to set s to epsv (=numpy.spacing(1)), + # the closest thing to zero + if isscalar(s): + s = s if s else spacing(1) + else: + s[s == 0] = spacing(1) + + oneOver4ASquared = 1.0 / s + w = p - q + b2 = sum(cross(u, w, axis=0) * n, axis=0) * oneOver4ASquared + b1 = sum(cross(w, v, axis=0) * n, axis=0) * oneOver4ASquared + b = vstack((1 - b1 - b2, b1, b2)) + + return b.T diff --git a/mesh-master/mesh/geometry/cross_product.py b/mesh-master/mesh/geometry/cross_product.py new file mode 100644 index 0000000000000000000000000000000000000000..fc1aeb47be580c5373ce83e21a6be6e2c527c193 --- /dev/null +++ b/mesh-master/mesh/geometry/cross_product.py @@ -0,0 +1,37 @@ +#!/usr/bin/env python +# encoding: utf-8 + +# Copyright (c) 2012 Max Planck Society. All rights reserved. +# Created by Matthew Loper on 2012-07-20. + +import numpy as np + + +def CrossProduct(a, b): + """Computes the cross product of 2 vectors""" + a = a.reshape(-1, 3) + b = b.reshape(-1, 3) + + a1 = a[:, 0] + a2 = a[:, 1] + a3 = a[:, 2] + + Ax = np.zeros((len(a1), 3, 3)) + Ax[:, 0, 1] = -a3 + Ax[:, 0, 2] = +a2 + Ax[:, 1, 0] = +a3 + Ax[:, 1, 2] = -a1 + Ax[:, 2, 0] = -a2 + Ax[:, 2, 1] = +a1 + + return _call_einsum_matvec(Ax, b) + + +def _call_einsum_matvec(m, righthand): + r = righthand.reshape(m.shape[0], 3) + return np.einsum('ijk,ik->ij', m, r).flatten() + + +def _call_einsum_matmat(m, righthand): + r = righthand.reshape(m.shape[0], 3, -1) + return np.einsum('ijk,ikm->ijm', m, r).reshape(-1, r.shape[2]) diff --git a/mesh-master/mesh/geometry/rodrigues.py b/mesh-master/mesh/geometry/rodrigues.py new file mode 100644 index 0000000000000000000000000000000000000000..c405922f2588ab8ec3faca8cdf932df309742c47 --- /dev/null +++ b/mesh-master/mesh/geometry/rodrigues.py @@ -0,0 +1,125 @@ +#!/usr/bin/env python +# encoding: utf-8 + +# Copyright (c) 2012 Max Planck Society. All rights reserved. +# Created by Matthew Loper on 2012-07-22. 
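A quick numeric check of barycentric_coordinates_of_projection above, before the rodrigues implementation continues. The triangle has vertices q, q+u and q+v, and the returned weights reconstruct the projection of p onto the triangle's plane (import path assumed for this vendored copy):

    # Hedged sketch: one point projected onto one triangle.
    import numpy as np
    from psbody.mesh.geometry.barycentric_coordinates_of_projection import (
        barycentric_coordinates_of_projection)

    q = np.array([0.0, 0.0, 0.0])    # first triangle vertex
    u = np.array([1.0, 0.0, 0.0])    # edge to the second vertex (q + u)
    v = np.array([0.0, 1.0, 0.0])    # edge to the third vertex (q + v)
    p = np.array([0.25, 0.25, 5.0])  # point above the triangle plane

    b = barycentric_coordinates_of_projection(p, q, u, v)      # -> [[0.5, 0.25, 0.25]]
    proj = b[0, 0] * q + b[0, 1] * (q + u) + b[0, 2] * (q + v)
    assert np.allclose(proj, [0.25, 0.25, 0.0])                 # recovers p's projection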
+ +import numpy as np + + +def rodrigues(r, calculate_jacobian=True): + """Computes the Rodrigues transform and its derivative + + :param r: either a 3-vector representing the rotation parameter, or a full rotation matrix + :param calculate_jacobian: indicates if the Jacobian of the transform is also required + :returns: If `calculate_jacobian` is `True`, the Jacobian is given as the second element of the returned tuple. + """ + + r = np.array(r, dtype=np.double) + eps = np.finfo(np.double).eps + + if np.all(r.shape == (3, 1)) or np.all(r.shape == (1, 3)) or np.all(r.shape == (3,)): + r = r.flatten() + theta = np.linalg.norm(r) + if theta < eps: + r_out = np.eye(3) + if calculate_jacobian: + jac = np.zeros((3, 9)) + jac[0, 5] = jac[1, 6] = jac[2, 1] = -1 + jac[0, 7] = jac[1, 2] = jac[2, 3] = 1 + + else: + c = np.cos(theta) + s = np.sin(theta) + c1 = 1. - c + itheta = 1.0 if theta == 0.0 else 1.0 / theta + r *= itheta + I = np.eye(3) + rrt = np.array([r * r[0], r * r[1], r * r[2]]) + _r_x_ = np.array([[0, -r[2], r[1]], [r[2], 0, -r[0]], [-r[1], r[0], 0]]) + r_out = c * I + c1 * rrt + s * _r_x_ + if calculate_jacobian: + drrt = np.array([[r[0] + r[0], r[1], r[2], r[1], 0, 0, r[2], 0, 0], + [0, r[0], 0, r[0], r[1] + r[1], r[2], 0, r[2], 0], + [0, 0, r[0], 0, 0, r[1], r[0], r[1], r[2] + r[2]]]) + d_r_x_ = np.array([[0, 0, 0, 0, 0, -1, 0, 1, 0], + [0, 0, 1, 0, 0, 0, -1, 0, 0], + [0, -1, 0, 1, 0, 0, 0, 0, 0]]) + I = np.array([I.flatten(), I.flatten(), I.flatten()]) + ri = np.array([[r[0]], [r[1]], [r[2]]]) + a0 = -s * ri + a1 = (s - 2 * c1 * itheta) * ri + a2 = np.ones((3, 1)) * c1 * itheta + a3 = (c - s * itheta) * ri + a4 = np.ones((3, 1)) * s * itheta + jac = a0 * I + a1 * rrt.flatten() + a2 * drrt + a3 * _r_x_.flatten() + a4 * d_r_x_ + elif np.all(r.shape == (3, 3)): + u, d, v = np.linalg.svd(r) + r = np.dot(u, v) + rx = r[2, 1] - r[1, 2] + ry = r[0, 2] - r[2, 0] + rz = r[1, 0] - r[0, 1] + s = np.linalg.norm(np.array([rx, ry, rz])) * np.sqrt(0.25) + c = np.clip((np.sum(np.diag(r)) - 1) * 0.5, -1, 1) + theta = np.arccos(c) + if s < 1e-5: + if c > 0: + r_out = np.zeros((3, 1)) + else: + rx, ry, rz = np.clip(np.sqrt((np.diag(r) + 1) * 0.5), 0, np.inf) + if r[0, 1] < 0: + ry = -ry + if r[0, 2] < 0: + rz = -rz + if np.abs(rx) < np.abs(ry) and np.abs(rx) < np.abs(rz) and ((r[1, 2] > 0) != (ry * rz > 0)): + rz = -rz + + r_out = np.array([[rx, ry, rz]]).T + theta /= np.linalg.norm(r_out) + r_out *= theta + if calculate_jacobian: + jac = np.zeros((9, 3)) + if c > 0: + jac[1, 2] = jac[5, 0] = jac[6, 1] = -0.5 + jac[2, 1] = jac[3, 2] = jac[7, 0] = 0.5 + else: + vth = 1.0 / (2.0 * s) + if calculate_jacobian: + dtheta_dtr = -1. 
/ s + dvth_dtheta = -vth * c / s + d1 = 0.5 * dvth_dtheta * dtheta_dtr + d2 = 0.5 * dtheta_dtr + dvardR = np.array([ + [0, 0, 0, 0, 0, 1, 0, -1, 0], + [0, 0, -1, 0, 0, 0, 1, 0, 0], + [0, 1, 0, -1, 0, 0, 0, 0, 0], + [d1, 0, 0, 0, d1, 0, 0, 0, d1], + [d2, 0, 0, 0, d2, 0, 0, 0, d2]]) + dvar2dvar = np.array([ + [vth, 0, 0, rx, 0], + [0, vth, 0, ry, 0], + [0, 0, vth, rz, 0], + [0, 0, 0, 0, 1]]) + domegadvar2 = np.array([ + [theta, 0, 0, rx * vth], + [0, theta, 0, ry * vth], + [0, 0, theta, rz * vth]]) + jac = np.dot(np.dot(domegadvar2, dvar2dvar), dvardR) + for ii in range(3): + jac[ii] = jac[ii].reshape((3, 3)).T.flatten() + jac = jac.T + vth *= theta + r_out = np.array([[rx, ry, rz]]).T * vth + else: + raise Exception("rodrigues: input matrix must be 1x3, 3x1 or 3x3.") + if calculate_jacobian: + return r_out, jac + else: + return r_out + + +def rodrigues2rotmat(r): + # R = np.zeros((3, 3)) + r_skew = np.array([[0, -r[2], r[1]], [r[2], 0, -r[0]], [-r[1], r[0], 0]]) + theta = np.linalg.norm(r) + return np.identity(3) + np.sin(theta) * r_skew + (1 - np.cos(theta)) * r_skew.dot(r_skew) diff --git a/mesh-master/mesh/geometry/tri_normals.py b/mesh-master/mesh/geometry/tri_normals.py new file mode 100644 index 0000000000000000000000000000000000000000..0d503b88d43ba46835d1b3df13e54302f0a5b22d --- /dev/null +++ b/mesh-master/mesh/geometry/tri_normals.py @@ -0,0 +1,72 @@ +#!/usr/bin/env python +# encoding: utf-8 + +# Copyright (c) 2012 Max Planck Society. All rights reserved. +# Created by Matthew Loper on 2012-07-22. + + +""" +tri_normals.py + +""" + +from ..utils import col +from .cross_product import CrossProduct + +import numpy as np + + +def TriNormals(v, f): + return NormalizedNx3(TriNormalsScaled(v, f)) + + +def TriNormalsScaled(v, f): + return CrossProduct(TriEdges(v, f, 1, 0), TriEdges(v, f, 2, 0)) + + +def NormalizedNx3(v): + ss = np.sum(v.reshape(-1, 3) ** 2, axis=1) + ss[ss == 0] = 1 + s = np.sqrt(ss) + + return (v.reshape(-1, 3) / col(s)).flatten() + + +def TriEdges(v, f, cplus, cminus): + assert(cplus >= 0 and cplus <= 2 and cminus >= 0 and cminus <= 2) + return _edges_for(v, f, cplus, cminus) + + +def _edges_for(v, f, cplus, cminus): + return ( + v.reshape(-1, 3)[f[:, cplus], :] - + v.reshape(-1, 3)[f[:, cminus], :]).ravel() + + +def TriToScaledNormal(x, tri): + + v = x.reshape(-1, 3) + + def v_xyz(iV): + return v[tri[:, iV], :] + + return np.cross(v_xyz(1) - v_xyz(0), v_xyz(2) - v_xyz(0)) + + +def _bsxfun(oper, a, b): + if a.shape[0] == b.shape[0] or a.shape[1] == b.shape[1]: + return oper(a, b) + elif min(a.shape) == 1 and min(b.shape) == 1: + if a.shape[0] == 1: + return oper(np.tile(a, (b.shape[0], 1)), b) + else: + return oper(np.tile(a, (1, b.shape[1], b))) + else: + raise '_bsxfun failure' + + +def NormalizeRows(x): + + s = (np.sqrt(np.sum(x ** 2, axis=1))).flatten() + s[s == 0] = 1 + return _bsxfun(np.divide, x, col(s)) diff --git a/mesh-master/mesh/geometry/triangle_area.py b/mesh-master/mesh/geometry/triangle_area.py new file mode 100644 index 0000000000000000000000000000000000000000..604e4bea97723325fbe0a7977eb3412f835cb723 --- /dev/null +++ b/mesh-master/mesh/geometry/triangle_area.py @@ -0,0 +1,12 @@ +#!/usr/bin/env python +# encoding: utf-8 + +# Copyright (c) 2013 Max Planck Society. All rights reserved. 
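The rodrigues helper above converts in both directions, so a round trip is an easy sanity check: an axis-angle vector maps to an orthonormal rotation matrix and back to the same vector. A hedged sketch, with the import path assumed for this vendored copy (calculate_jacobian=False returns only the transform):

    # Hedged round-trip sketch for rodrigues().
    import numpy as np
    from psbody.mesh.geometry.rodrigues import rodrigues

    r = np.array([0.1, 0.2, 0.3])               # axis-angle vector, angle = |r| radians
    R = rodrigues(r, calculate_jacobian=False)  # 3x3 rotation matrix
    assert np.allclose(R.dot(R.T), np.eye(3))   # orthonormal
    r_back = rodrigues(R, calculate_jacobian=False).flatten()
    assert np.allclose(r_back, r)               # recovers the original vector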
+ +from .tri_normals import TriToScaledNormal +import numpy as np + + +def triangle_area(v, f): + """Computes the area associated to a set of triangles""" + return (np.sqrt(np.sum(TriToScaledNormal(v, f) ** 2, axis=1)) / 2.).flatten() diff --git a/mesh-master/mesh/geometry/vert_normals.py b/mesh-master/mesh/geometry/vert_normals.py new file mode 100644 index 0000000000000000000000000000000000000000..f0e8c59d3474f3bd1b8564029ea8e9c80cb3664d --- /dev/null +++ b/mesh-master/mesh/geometry/vert_normals.py @@ -0,0 +1,34 @@ +#!/usr/bin/env python +# encoding: utf-8 + +# Copyright (c) 2013 Max Planck Society. All rights reserved. +# Created by Matthew Loper on 2013-03-12. + + +import scipy.sparse as sp +import numpy as np +from .tri_normals import NormalizedNx3, TriNormalsScaled +from ..utils import col + + +def MatVecMult(mtx, vec): + return mtx.dot(col(vec)).flatten() + + +def VertNormals(v, f): + return NormalizedNx3(VertNormalsScaled(v, f)) + + +def VertNormalsScaled(v, f): + IS = f.flatten() + JS = np.array([range(f.shape[0])] * 3).T.flatten() + data = np.ones(len(JS)) + + IS = np.concatenate((IS * 3, IS * 3 + 1, IS * 3 + 2)) + JS = np.concatenate((JS * 3, JS * 3 + 1, JS * 3 + 2)) # is this right? + data = np.concatenate((data, data, data)) + + faces_by_vertex = sp.csc_matrix((data, (IS, JS)), shape=(v.size, f.size)) + + # faces_by_vertex should be 3 x wider...? + return NormalizedNx3(MatVecMult(faces_by_vertex, TriNormalsScaled(v, f))) diff --git a/mesh-master/mesh/landmarks.py b/mesh-master/mesh/landmarks.py new file mode 100644 index 0000000000000000000000000000000000000000..8f7468dfbaaa69d7a925170904f8fa086d318794 --- /dev/null +++ b/mesh-master/mesh/landmarks.py @@ -0,0 +1,102 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Copyright (c) 2013 Max Planck Society. All rights reserved. +# Created by Matthew Loper on 2013-02-20. 
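Both geometry helpers just added (triangle_area and the vertex-normal routines) can be sanity-checked on a single unit right triangle, whose area is 0.5 and whose normals all point along +z. A hedged sketch with assumed import paths for this vendored copy:

    # Hedged sketch: one right triangle in the z = 0 plane.
    import numpy as np
    from psbody.mesh.geometry.triangle_area import triangle_area
    from psbody.mesh.geometry.vert_normals import VertNormals

    v = np.array([[0.0, 0.0, 0.0],
                  [1.0, 0.0, 0.0],
                  [0.0, 1.0, 0.0]])
    f = np.array([[0, 1, 2]])

    assert np.allclose(triangle_area(v, f), [0.5])   # area = base * height / 2
    vn = VertNormals(v, f).reshape(-1, 3)            # per-vertex unit normals
    assert np.allclose(vn, [[0.0, 0.0, 1.0]] * 3)    # all along +z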
+ + +import numpy as np + +""" +landmarks.py + +""" + + +def landm_xyz_linear_transform(self, ordering=None): + from .utils import col, sparse + + landmark_order = ordering if ordering else self.landm_names + # construct a sparse matrix that converts between the landmark pts and all vertices, with height (# landmarks * 3) and width (# vertices * 3) + if hasattr(self, 'landm_regressors'): + landmark_coefficients = np.hstack([self.landm_regressors[name][1] for name in landmark_order]) + landmark_indices = np.hstack([self.landm_regressors[name][0] for name in landmark_order]) + column_indices = np.hstack([col(3 * landmark_indices + i) for i in range(3)]).flatten() + row_indices = np.hstack([[3 * index, 3 * index + 1, 3 * index + 2] * len(self.landm_regressors[landmark_order[index]][0]) for index in np.arange(len(landmark_order))]) + values = np.hstack([col(landmark_coefficients) for i in range(3)]).flatten() + return sparse(row_indices, column_indices, values, 3 * len(landmark_order), 3 * self.v.shape[0]) + elif hasattr(self, 'landm'): + landmark_indices = np.array([self.landm[name] for name in landmark_order]) + column_indices = np.hstack(([col(3 * landmark_indices + i) for i in range(3)])).flatten() + row_indices = np.arange(3 * len(landmark_order)) + return sparse(row_indices, column_indices, np.ones(len(column_indices)), 3 * len(landmark_order), 3 * self.v.shape[0]) + else: + return np.zeros((0, 0)) + + +@property +def landm_xyz(self, ordering=None): + landmark_order = ordering if ordering else self.landm_names + landmark_vertex_locations = (self.landm_xyz_linear_transform(landmark_order) * self.v.flatten()).reshape(-1, 3) if landmark_order else np.zeros((0, 0)) + if landmark_order: + return dict([(landmark_order[i], xyz) for i, xyz in enumerate(landmark_vertex_locations)]) + return {} + + +def recompute_landmark_indices(self, landmark_fname=None, safe_mode=True): + filtered_landmarks = dict( + filter( + lambda e, : e[1] != [0.0, 0.0, 0.0], + self.landm_raw_xyz.items() + ) if (landmark_fname and safe_mode) else self.landm_raw_xyz.items()) + if len(filtered_landmarks) != len(self.landm_raw_xyz): + print("WARNING: %d landmarks in file %s are positioned at (0.0, 0.0, 0.0) and were ignored" % (len(self.landm_raw_xyz) - len(filtered_landmarks), landmark_fname)) + + self.landm = {} + self.landm_regressors = {} + if filtered_landmarks: + landmark_names = list(filtered_landmarks.keys()) + closest_vertices, _ = self.closest_vertices(np.array(list(filtered_landmarks.values()))) + self.landm = dict(zip(landmark_names, closest_vertices)) + if len(self.f): + face_indices, closest_points = self.closest_faces_and_points(np.array(list(filtered_landmarks.values()))) + vertex_indices, coefficients = self.barycentric_coordinates_for_points(closest_points, face_indices) + self.landm_regressors = dict([(name, (vertex_indices[i], coefficients[i])) for i, name in enumerate(landmark_names)]) + else: + self.landm_regressors = dict([(name, (np.array([closest_vertices[i]]), np.array([1.0]))) for i, name in enumerate(landmark_names)]) + + +def set_landmarks_from_xyz(self, landm_raw_xyz): + self.landm_raw_xyz = landm_raw_xyz if hasattr(landm_raw_xyz, 'keys') else dict((str(i), l) for i, l in enumerate(landm_raw_xyz)) + self.recompute_landmark_indices() + + +def is_vertex(x): + return hasattr(x, "__len__") and len(x) == 3 + + +def is_index(x): + return isinstance(x, (int, np.int32, np.int64)) + + +def set_landmarks_from_raw(self, landmarks): + ''' + can accept: + {'name1': [float, float, float], 'name2': [float, 
float, float], ...} + {'name1': np.array([float, float, float]), 'name2': np.array([float, float, float]), ...} + [[float,float,float],[float,float,float], ...] + np.array([[float,float,float],[float,float,float], ...]) + [np.array([float,float,float]),np.array([float,float,float]), ...] + {'name1': int, 'name2': int, ...} + [int,int,int] + np.array([int,int,int]) + ''' + landmarks = landmarks if hasattr(landmarks, 'keys') else dict((str(i), l) for i, l in enumerate(landmarks)) + + if all(is_vertex(x) for x in landmarks.values()): + landmarks = dict((i, np.array(l)) for i, l in landmarks.items()) + self.set_landmarks_from_xyz(landmarks) + elif all(is_index(x) for x in landmarks.values()): + self.landm = landmarks + self.recompute_landmark_xyz() + else: + raise Exception("Can't parse landmarks") diff --git a/mesh-master/mesh/lines.py b/mesh-master/mesh/lines.py new file mode 100644 index 0000000000000000000000000000000000000000..927d8d58749f8911208f3f017bf6b1d60fbf91bb --- /dev/null +++ b/mesh-master/mesh/lines.py @@ -0,0 +1,61 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Copyright (c) 2012 Max Planck Society. All rights reserved. + +import numpy as np +from . import colors + + +class Lines(object): + """Collection of 3D lines + + Attributes: + v: Vx3 array of vertices + e: Ex2 array of edges + """ + + def __init__(self, v, e, vc=None, ec=None): + + self.v = np.array(v) + self.e = np.array(e) + + if vc is not None: + self.set_vertex_colors(vc) + + if ec is not None: + self.set_edge_colors(ec) + + def colors_like(self, color, arr): + from .utils import row, col + if isinstance(color, str): + color = colors.name_to_rgb[color] + elif isinstance(color, list): + color = np.array(color) + + if color.shape == (arr.shape[0],): + def jet(v): + fourValue = 4 * v + red = min(fourValue - 1.5, -fourValue + 4.5) + green = min(fourValue - 0.5, -fourValue + 3.5) + blue = min(fourValue + 0.5, -fourValue + 2.5) + result = np.array([red, green, blue]) + result[result > 1.0] = 1.0 + result[result < 0.0] = 0.0 + return row(result) + color = col(color) + color = np.concatenate([jet(color[i]) for i in xrange(color.size)], axis=0) + + return np.ones((arr.shape[0], 3)) * color + + def set_vertex_colors(self, vc): + self.vc = self.colors_like(vc, self.v) + + def set_edge_colors(self, ec): + self.ec = self.colors_like(ec, self.e) + + def write_obj(self, filename): + with open(filename, 'w') as fi: + for r in self.v: + fi.write('v %f %f %f\n' % (r[0], r[1], r[2])) + for e in self.e: + fi.write('l %d %d\n' % (e[0] + 1, e[1] + 1)) diff --git a/mesh-master/mesh/mesh.py b/mesh-master/mesh/mesh.py new file mode 100644 index 0000000000000000000000000000000000000000..76c0c26d9c7fc7231009ed36d8341fd82a94fe2b --- /dev/null +++ b/mesh-master/mesh/mesh.py @@ -0,0 +1,492 @@ +#!/usr/bin/env python +# encoding: utf-8 + +# Copyright (c) 2012 Max Planck Society. All rights reserved. + +""" +Mesh module +----------- + +""" + + +import os +from functools import reduce + +import numpy as np + +from . import colors +from . import search + +try: + from .serialization import serialization +except ImportError: + pass + +from . import landmarks +from . import texture +from . 
import processing + + +__all__ = ["Mesh"] + + +class Mesh(object): + """3d Triangulated Mesh class + + Attributes: + v: Vx3 array of vertices + f: Fx3 array of faces + + Optional attributes: + fc: Fx3 array of face colors + vc: Vx3 array of vertex colors + vn: Vx3 array of vertex normals + segm: dictionary of part names to triangle indices + + """ + def __init__(self, + v=None, + f=None, + segm=None, + filename=None, + ppfilename=None, + lmrkfilename=None, + basename=None, + vc=None, + fc=None, + vscale=None, + landmarks=None): + """ + :param v: vertices + :param f: faces + :param filename: a filename from which a mesh is loaded + """ + + if filename is not None: + self.load_from_file(filename) + if hasattr(self, 'f'): + self.f = np.require(self.f, dtype=np.uint32) + self.v = np.require(self.v, dtype=np.float64) + self.filename = filename + if vscale is not None: + self.v *= vscale + if v is not None: + self.v = np.array(v, dtype=np.float64) + if vscale is not None: + self.v *= vscale + if f is not None: + self.f = np.require(f, dtype=np.uint32) + + self.basename = basename + if self.basename is None and filename is not None: + self.basename = os.path.splitext(os.path.basename(filename))[0] + + if segm is not None: + self.segm = segm + if landmarks is not None: + self.set_landmark_indices_from_any(landmarks) + if ppfilename is not None: + self.set_landmark_indices_from_ppfile(ppfilename) + if lmrkfilename is not None: + self.set_landmark_indices_from_lmrkfile(lmrkfilename) + + if vc is not None: + self.set_vertex_colors(vc) + + if fc is not None: + self.set_face_colors(fc) + + def __del__(self): + if hasattr(self, 'textureID'): + from OpenGL.GL import glDeleteTextures + glDeleteTextures([self.textureID]) + + def edges_as_lines(self, copy_vertices=False): + from .lines import Lines + edges = self.f[:, [0, 1, 1, 2, 2, 0]].flatten().reshape(-1, 2) + verts = self.v.copy() if copy_vertices else self.v + return Lines(v=verts, e=edges) + + def show(self, mv=None, meshes=[], lines=[]): + from .meshviewer import MeshViewer + from .utils import row + + if mv is None: + mv = MeshViewer(keepalive=True) + + if hasattr(self, 'landm'): + from .sphere import Sphere + sphere = Sphere(np.zeros((3)), 1.).to_mesh() + scalefactor = 1e-2 * np.max(np.max(self.v) - np.min(self.v)) / np.max(np.max(sphere.v) - np.min(sphere.v)) + sphere.v = sphere.v * scalefactor + spheres = [Mesh(vc='SteelBlue', f=sphere.f, v=sphere.v + row(np.array(self.landm_raw_xyz[k]))) for k in self.landm.keys()] + mv.set_dynamic_meshes([self] + spheres + meshes, blocking=True) + else: + mv.set_dynamic_meshes([self] + meshes, blocking=True) + mv.set_dynamic_lines(lines) + return mv + + def colors_like(self, color, arr=None): + from .utils import row, col + + if arr is None: + arr = np.zeros(self.v.shape) + + # if arr is single-dim, reshape it + if arr.ndim == 1 or arr.shape[1] == 1: + arr = arr.reshape(-1, 3) + + if isinstance(color, str): + color = colors.name_to_rgb[color] + elif isinstance(color, list): + color = np.array(color) + + if color.shape[0] == arr.shape[0] and color.shape[0] == color.size: + def jet(v): + fourValue = 4 * v + red = min(fourValue - 1.5, -fourValue + 4.5) + green = min(fourValue - 0.5, -fourValue + 3.5) + blue = min(fourValue + 0.5, -fourValue + 2.5) + result = np.array([red, green, blue]) + result[result > 1.0] = 1.0 + result[result < 0.0] = 0.0 + return row(result) + color = col(color) + color = np.concatenate([jet(color[i]) for i in range(color.size)], axis=0) + + return np.ones_like(arr) * color + + def 
set_vertex_colors(self, vc, vertex_indices=None): + if vertex_indices is not None: + self.vc[vertex_indices] = self.colors_like(vc, self.v[vertex_indices]) + else: + self.vc = self.colors_like(vc, self.v) + return self + + def set_vertex_colors_from_weights(self, weights, scale_to_range_1=True, color=True): + # from numpy import ones_like + if weights is None: + return self + if scale_to_range_1: + weights = weights - np.min(weights) + weights = (1.0 - 0.0) * weights / np.max(weights) + 0.0 + if color: + from matplotlib import cm + self.vc = cm.jet(weights)[:, :3] + else: + self.vc = np.tile(np.reshape(weights, (len(weights), 1)), (1, 3)) # *ones_like(self.v) + return self + + def scale_vertex_colors(self, weights, w_min=0.0, w_max=1.0): + if weights is None: + return self + weights = weights - np.min(weights) + weights = (w_max - w_min) * weights / np.max(weights) + w_min + self.vc = (weights * self.vc.T).T if weights is not None else self.vc + return self + + def set_face_colors(self, fc): + self.fc = self.colors_like(fc, self.f) + return self + + def faces_by_vertex(self, as_sparse_matrix=False): + import scipy.sparse as sp + if not as_sparse_matrix: + faces_by_vertex = [[] for i in range(len(self.v))] + for i, face in enumerate(self.f): + faces_by_vertex[face[0]].append(i) + faces_by_vertex[face[1]].append(i) + faces_by_vertex[face[2]].append(i) + else: + row = self.f.flatten() + col = np.array([range(self.f.shape[0])] * 3).T.flatten() + data = np.ones(len(col)) + faces_by_vertex = sp.csr_matrix((data, (row, col)), shape=(self.v.shape[0], self.f.shape[0])) + return faces_by_vertex + + def estimate_vertex_normals(self, face_to_verts_sparse_matrix=None): + from .geometry.tri_normals import TriNormalsScaled + + face_normals = TriNormalsScaled(self.v, self.f).reshape(-1, 3) + ftov = face_to_verts_sparse_matrix if face_to_verts_sparse_matrix else self.faces_by_vertex(as_sparse_matrix=True) + non_scaled_normals = ftov * face_normals + norms = (np.sum(non_scaled_normals ** 2.0, axis=1) ** 0.5).T + norms[norms == 0] = 1.0 + return (non_scaled_normals.T / norms).T + + def barycentric_coordinates_for_points(self, points, face_indices): + from .geometry.barycentric_coordinates_of_projection import barycentric_coordinates_of_projection + vertex_indices = self.f[face_indices.flatten(), :] + tri_vertices = np.array([self.v[vertex_indices[:, 0]], self.v[vertex_indices[:, 1]], self.v[vertex_indices[:, 2]]]) + return vertex_indices, barycentric_coordinates_of_projection(points, tri_vertices[0, :], tri_vertices[1, :] - tri_vertices[0, :], tri_vertices[2, :] - tri_vertices[0, :]) + + def transfer_segm(self, mesh, exclude_empty_parts=True): + self.segm = {} + if hasattr(mesh, 'segm'): + face_centers = np.array([self.v[face, :].mean(axis=0) for face in self.f]) + (closest_faces, closest_points) = mesh.closest_faces_and_points(face_centers) + mesh_parts_by_face = mesh.parts_by_face() + parts_by_face = [mesh_parts_by_face[face] for face in closest_faces.flatten()] + self.segm = dict([(part, []) for part in mesh.segm.keys()]) + for face, part in enumerate(parts_by_face): + self.segm[part].append(face) + for part in self.segm.keys(): + self.segm[part].sort() + if exclude_empty_parts and not self.segm[part]: + del self.segm[part] + + @property + def verts_by_segm(self): + return dict((segment, sorted(set(self.f[indices].flatten()))) for segment, indices in self.segm.items()) + + def parts_by_face(self): + segments_by_face = [''] * len(self.f) + for part in self.segm.keys(): + for face in self.segm[part]: + 
segments_by_face[face] = part + return segments_by_face + + def verts_in_common(self, segments): + """ + returns array of all vertex indices common to each segment in segments""" + return sorted(reduce(lambda s0, s1: s0.intersection(s1), + [set(self.verts_by_segm[segm]) for segm in segments])) + # # indices of vertices in the faces of the first segment + # indices = self.verts_by_segm[segments[0]] + # for segment in segments[1:] : + # indices = sorted([index for index in self.verts_by_segm[segment] if index in indices]) # Intersect current segment with current indices + # return sorted(set(indices)) + + @property + def joint_names(self): + return self.joint_regressors.keys() + + @property + def joint_xyz(self): + joint_locations = {} + for name in self.joint_names: + joint_locations[name] = self.joint_regressors[name]['offset'] + \ + np.sum(self.v[self.joint_regressors[name]['v_indices']].T * self.joint_regressors[name]['coeff'], axis=1) + return joint_locations + + # creates joint_regressors from a list of joint names and a per joint list of vertex indices (e.g. a ring of vertices) + # For the regression coefficients, all vertices for a given joint are given equal weight + def set_joints(self, joint_names, vertex_indices): + self.joint_regressors = {} + for name, indices in zip(joint_names, vertex_indices): + self.joint_regressors[name] = {'v_indices': indices, + 'coeff': [1.0 / len(indices)] * len(indices), + 'offset': np.array([0., 0., 0.])} + + def vertex_visibility(self, camera, normal_threshold=None, omni_directional_camera=False, binary_visiblity=True): + + vis, n_dot_cam = self.vertex_visibility_and_normals(camera, omni_directional_camera) + + if normal_threshold is not None: + vis = np.logical_and(vis, n_dot_cam > normal_threshold) + + return np.squeeze(vis) if binary_visiblity else np.squeeze(vis * n_dot_cam) + + def vertex_visibility_and_normals(self, camera, omni_directional_camera=False): + from .visibility import visibility_compute + arguments = {'v': self.v, + 'f': self.f, + 'cams': np.array([camera.origin.flatten()])} + + if not omni_directional_camera: + arguments['sensors'] = np.array([camera.sensor_axis.flatten()]) + + arguments['n'] = self.vn if hasattr(self, 'vn') else self.estimate_vertex_normals() + + return(visibility_compute(**arguments)) + + def visibile_mesh(self, camera=[0.0, 0.0, 0.0]): + vis = self.vertex_visibility(camera) + faces_to_keep = filter(lambda face: vis[face[0]] * vis[face[1]] * vis[face[2]], self.f) + vertex_indices_to_keep = np.nonzero(vis)[0] + vertices_to_keep = self.v[vertex_indices_to_keep] + old_to_new_indices = np.zeros(len(vis)) + old_to_new_indices[vertex_indices_to_keep] = range(len(vertex_indices_to_keep)) + return Mesh(v=vertices_to_keep, f=np.array([old_to_new_indices[face] for face in faces_to_keep])) + + def estimate_circumference(self, plane_normal, plane_distance, partNamesAllowed=None, want_edges=False): + raise Exception('estimate_circumference function has moved to body.mesh.metrics.circumferences') + + # ###################################################### + # Processing + def reset_normals(self, face_to_verts_sparse_matrix=None, reset_face_normals=False): + return processing.reset_normals(self, face_to_verts_sparse_matrix, reset_face_normals) + + def reset_face_normals(self): + return processing.reset_face_normals(self) + + def uniquified_mesh(self): + """This function returns a copy of the mesh in which vertices are copied such that + each vertex appears in only one face, and hence has only one texture""" + return 
processing.uniquified_mesh(self) + + def keep_vertices(self, keep_list): + return processing.keep_vertices(self, keep_list) + + def remove_vertices(self, v_list): + return self.keep_vertices(np.setdiff1d(np.arange(self.v.shape[0]), v_list)) + + def point_cloud(self): + return Mesh(v=self.v, f=[], vc=self.vc) if hasattr(self, 'vc') else Mesh(v=self.v, f=[]) + + def remove_faces(self, face_indices_to_remove): + return processing.remove_faces(self, face_indices_to_remove) + + def scale_vertices(self, scale_factor): + return processing.scale_vertices(self, scale_factor) + + def rotate_vertices(self, rotation): + return processing.rotate_vertices(self, rotation) + + def translate_vertices(self, translation): + return processing.translate_vertices(self, translation) + + def flip_faces(self): + return processing.flip_faces(self) + + def simplified(self, factor=None, n_verts_desired=None): + from .topology import qslim_decimator + return qslim_decimator(self, factor, n_verts_desired) + + def subdivide_triangles(self): + return processing.subdivide_triangles(self) + + def concatenate_mesh(self, mesh): + return processing.concatenate_mesh(self, mesh) + + # new_ordering specifies the new index of each vertex. If new_ordering[i] = j, + # vertex i should now be the j^th vertex. As such, each entry in new_ordering should be unique. + def reorder_vertices(self, new_ordering, new_normal_ordering=None): + processing.reorder_vertices(self, new_ordering, new_normal_ordering) + + # ###################################################### + # Landmark methods + + @property + def landm_names(self): + names = [] + if hasattr(self, 'landm_regressors') or hasattr(self, 'landm'): + names = self.landm_regressors.keys() if hasattr(self, 'landm_regressors') else self.landm.keys() + return list(names) + + @property + def landm_xyz(self, ordering=None): + landmark_order = ordering if ordering else self.landm_names + landmark_vertex_locations = (self.landm_xyz_linear_transform(landmark_order) * self.v.flatten()).reshape(-1, 3) if landmark_order else np.zeros((0, 0)) + return dict([(landmark_order[i], xyz) for i, xyz in enumerate(landmark_vertex_locations)]) if landmark_order else {} + + def set_landmarks_from_xyz(self, landm_raw_xyz): + self.landm_raw_xyz = landm_raw_xyz if hasattr(landm_raw_xyz, 'keys') else dict((str(i), l) for i, l in enumerate(landm_raw_xyz)) + self.recompute_landmark_indices() + + def landm_xyz_linear_transform(self, ordering=None): + return landmarks.landm_xyz_linear_transform(self, ordering) + + def recompute_landmark_xyz(self): + self.landm_raw_xyz = dict((name, self.v[ind]) for name, ind in self.landm.items()) + + def recompute_landmark_indices(self, landmark_fname=None, safe_mode=True): + landmarks.recompute_landmark_indices(self, landmark_fname, safe_mode) + + def set_landmarks_from_regressors(self, regressors): + self.landm_regressors = regressors + + def set_landmark_indices_from_any(self, landmark_file_or_values): + serialization.set_landmark_indices_from_any(self, landmark_file_or_values) + + def set_landmarks_from_raw(self, landmark_file_or_values): + landmarks.set_landmarks_from_raw(self, landmark_file_or_values) + + ####################################################### + # Texture methods + + @property + def texture_image(self): + if not hasattr(self, '_texture_image'): + self.reload_texture_image() + return self._texture_image + + def set_texture_image(self, path_to_texture): + self.texture_filepath = path_to_texture + + def texture_coordinates_by_vertex(self): + return 
texture.texture_coordinates_by_vertex(self) + + def reload_texture_image(self): + texture.reload_texture_image(self) + + def transfer_texture(self, mesh_with_texture): + texture.transfer_texture(self, mesh_with_texture) + + def load_texture(self, texture_version): + texture.load_texture(self, texture_version) + + def texture_rgb(self, texture_coordinate): + return texture.texture_rgb(self, texture_coordinate) + + def texture_rgb_vec(self, texture_coordinates): + return texture.texture_rgb_vec(self, texture_coordinates) + + ####################################################### + # Search methods + + def compute_aabb_tree(self): + return search.AabbTree(self) + + def compute_aabb_normals_tree(self): + return search.AabbNormalsTree(self) + + def compute_closest_point_tree(self, use_cgal=False): + return search.CGALClosestPointTree(self) if use_cgal else search.ClosestPointTree(self) + + def closest_vertices(self, vertices, use_cgal=False): + return self.compute_closest_point_tree(use_cgal).nearest(vertices) + + def closest_points(self, vertices): + return self.closest_faces_and_points(vertices)[1] + + def closest_faces_and_points(self, vertices): + return self.compute_aabb_tree().nearest(vertices) + + ####################################################### + # Serialization methods + + def load_from_file(self, filename): + serialization.load_from_file(self, filename) + + def load_from_ply(self, filename): + serialization.load_from_ply(self, filename) + + def load_from_obj(self, filename): + serialization.load_from_obj(self, filename) + + def write_json(self, filename, header="", footer="", name="", include_faces=True, texture_mode=True): + serialization.write_json(self, filename, header, footer, name, include_faces, texture_mode) + + def write_three_json(self, filename, name=""): + serialization.write_three_json(self, filename, name) + + def write_ply(self, filename, flip_faces=False, ascii=False, little_endian=True, comments=[]): + serialization.write_ply(self, filename, flip_faces, ascii, little_endian, comments) + + def write_mtl(self, path, material_name, texture_name): + """Serializes a material attributes file""" + serialization.write_mtl(self, path, material_name, texture_name) + + def write_obj(self, filename, flip_faces=False, group=False, comments=None): + serialization.write_obj(self, filename, flip_faces, group, comments) + + def load_from_obj_cpp(self, filename): + serialization.load_from_obj_cpp(self, filename) + + def set_landmark_indices_from_ppfile(self, ppfilename): + serialization.set_landmark_indices_from_ppfile(self, ppfilename) + + def set_landmark_indices_from_lmrkfile(self, lmrkfilename): + serialization.set_landmark_indices_from_lmrkfile(self, lmrkfilename) diff --git a/mesh-master/mesh/meshviewer.py b/mesh-master/mesh/meshviewer.py new file mode 100644 index 0000000000000000000000000000000000000000..e457759fe7a59f1f710f2c39eea9872a51f37995 --- /dev/null +++ b/mesh-master/mesh/meshviewer.py @@ -0,0 +1,1274 @@ +#!/usr/bin/env python +# encoding: utf-8 + +# Copyright (c) 2012 Max Planck Society. All rights reserved. +# Created by Matthew Loper on 2012-05-11. + +""" +Mesh visualization and related classes +-------------------------------------- + +This module contains the core visualization tools for meshes. The +backend used for visualization is OpenGL. + +The module itself can be run like the following + +.. code:: + + python -m psbody.mesh.meshviewer arguments + +The following commands are used + +* ``arguments=TEST_FOR_OPENGL`` a basic OpenGL support is run. 
This + is usually performed on a forked python process. In case OpenGL is + not supported, a `DummyClass`` mesh viewer is returned. + +* ``arguments=title nb_x_axis nb_y_axis width height`` a new window is + created + +.. autosummary:: + + MeshViewer + MeshViewers + MeshViewerLocal + test_for_opengl +""" + +import copy +import logging +import multiprocessing +import os +import re +import subprocess +import sys +import tempfile +import time +import traceback + +import numpy as np +from OpenGL import GL, GLU, GLUT +from OpenGL.arrays.vbo import VBO +from PIL import Image +import zmq + +# if this file is processed/run as a python script/standalone, especially from the +# internal command +if __package__ is not None: + from .arcball import ( + ArcBallT, Matrix3fT, Matrix4fT, Point2fT, + Matrix3fMulMatrix3f, Matrix3fSetRotationFromQuat4f, + Matrix4fSetRotationFromMatrix3f) + from .geometry.tri_normals import TriNormals + from .fonts import get_textureid_with_text + from .mesh import Mesh + + +# this block is below the previous one to make my linter happy +if __package__ is None: + print("this file cannot be executed as a standalone python module") + print("python -m psbody.mesh.%s arguments" % (os.path.splitext(os.path.basename(__file__))[0])) + sys.exit(-1) + + +# Default transport and host are such that we can listen for incoming +# network connections. +ZMQ_TRANSPORT = "tcp" +ZMQ_HOST = "0.0.0.0" +# The dynamic port range. +ZMQ_PORT_MIN = 49152 +ZMQ_PORT_MAX = 65535 + +MESH_VIEWER_DEFAULT_TITLE = "Mesh Viewer" +MESH_VIEWER_DEFAULT_SHAPE = (1, 1) +MESH_VIEWER_DEFAULT_WIDTH = 1280 +MESH_VIEWER_DEFAULT_HEIGHT = 960 + + +def _run_self(args, stdin=None, stdout=None, stderr=None): + """Executes this same script module with the given arguments (forking without subprocess dependencies)""" + return subprocess.Popen([sys.executable] + + ['-m'] + ['%s.%s' % (__package__, os.path.splitext(os.path.basename(__file__))[0])] + + args, + stdin=stdin, + stdout=stdout, # if stdout is not None else subprocess.PIPE, + stderr=stderr) + + +def _test_for_opengl(): + try: + # from OpenGL.GLUT import glutInit + GLUT.glutInit() + except Exception as e: + print(e, file=sys.stderr) + print('failure') + else: + print('success') + + +test_for_opengl_cached = None + + +def test_for_opengl(): + """Tests if opengl is supported. + + .. note:: the result of the test is cached + + """ + + global test_for_opengl_cached + if test_for_opengl_cached is None: + + with open(os.devnull) as dev_null, \ + tempfile.TemporaryFile() as out, \ + tempfile.TemporaryFile() as err: + + p = _run_self(["TEST_FOR_OPENGL"], + stdin=dev_null, + stdout=out, + stderr=err) + p.wait() + + out.seek(0) + err.seek(0) + + line = ''.join(out.read().decode()) + test_for_opengl_cached = 'success' in line + if not test_for_opengl_cached: + print('OpenGL test failed: ') + print('\tstdout:', line) + print('\tstderr:', '\n'.join(err.read().decode())) + + return test_for_opengl_cached + + +class Dummy: + + def __getattr__(self, name): + return Dummy() + + def __call__(self, *args, **kwargs): + return Dummy() + + def __getitem__(self, key): + return Dummy() + + def __setitem__(self, key, value): + pass + + +def MeshViewer(titlebar='Mesh Viewer', + static_meshes=None, + static_lines=None, + uid=None, + autorecenter=True, + shape=(1, 1), + keepalive=True, + window_width=1280, + window_height=960, + snapshot_camera=None): + """Allows visual inspection of geometric primitives. 
+ + Write-only Attributes: + + :param titlebar: string printed in the window titlebar + :param static_meshes: list of Mesh objects to be displayed + :param static_lines: list of Lines objects to be displayed + + .. note:: `static_meshes` is meant for Meshes that are updated infrequently, + `and dynamic_meshes` is for Meshes that are updated frequently + (same for `dynamic_lines` vs. `static_lines`). + They may be treated differently for performance reasons. + + """ + + if not test_for_opengl(): + return Dummy() + + mv = MeshViewerLocal(shape=(1, 1), + uid=uid, + titlebar=titlebar, + keepalive=keepalive, + window_width=window_width, + window_height=window_height) + result = mv.get_subwindows()[0][0] + result.snapshot_camera = snapshot_camera + if static_meshes: + result.static_meshes = static_meshes + if static_lines: + result.static_lines = static_lines + result.autorecenter = autorecenter + + return result + + +def MeshViewers(shape=(1, 1), + titlebar="Mesh Viewers", + keepalive=True, + window_width=1280, + window_height=960): + """Allows subplot-style inspection of primitives in multiple subwindows. + + :param shape: a tuple indicating the number of vertical and horizontal windows requested + :param titlebar: the title appearing on the created window + + + Returns: a list of lists of MeshViewer objects: one per window requested. + """ + + if not test_for_opengl(): + return Dummy() + + mv = MeshViewerLocal(shape=shape, + titlebar=titlebar, + uid=None, + keepalive=keepalive, + window_width=window_width, + window_height=window_height) + return mv.get_subwindows() + + +class MeshSubwindow: + + def __init__(self, parent_window, which_window): + self.parent_window = parent_window + self.which_window = which_window + + def set_dynamic_meshes(self, list_of_meshes, blocking=False): + self.parent_window.set_dynamic_meshes(list_of_meshes, blocking, self.which_window) + + def set_static_meshes(self, list_of_meshes, blocking=False): + self.parent_window.set_static_meshes(list_of_meshes, blocking, self.which_window) + + # list_of_model_names_and_parameters should be of form [{'name': scape_model_name, 'parameters': scape_model_parameters}] + # here scape_model_name is the filepath of the scape model. 
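Taken together, the Mesh class and the MeshViewer factory above support a short end-to-end use: build a mesh from vertices and faces, then push it into a viewer subwindow. This is a hedged sketch, not the repo's own pipeline; it assumes the vendored package is importable as psbody.mesh with its compiled extensions built, and it needs a working OpenGL display (otherwise MeshViewer returns the Dummy stand-in defined above):

    # Hedged usage sketch for Mesh + MeshViewer.
    import numpy as np
    from psbody.mesh.mesh import Mesh
    from psbody.mesh.meshviewer import MeshViewer

    quad = Mesh(v=np.array([[0.0, 0.0, 0.0],
                            [1.0, 0.0, 0.0],
                            [1.0, 1.0, 0.0],
                            [0.0, 1.0, 0.0]]),
                f=np.array([[0, 1, 2], [0, 2, 3]]),
                vc='SteelBlue')                      # name resolved via colors.name_to_rgb

    viewer = MeshViewer(keepalive=False)             # a MeshSubwindow (or Dummy without OpenGL)
    viewer.set_dynamic_meshes([quad], blocking=True)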
+ def set_dynamic_models(self, list_of_model_names_and_parameters, blocking=False): + self.parent_window.set_dynamic_models(list_of_model_names_and_parameters, blocking, self.which_window) + + def set_dynamic_lines(self, list_of_lines, blocking=False): + self.parent_window.set_dynamic_lines(list_of_lines, blocking, self.which_window) + + def set_static_lines(self, list_of_lines, blocking=False): + self.parent_window.set_static_lines(list_of_lines, blocking=blocking, which_window=self.which_window) + + def set_titlebar(self, titlebar, blocking=False): + self.parent_window.set_titlebar(titlebar, blocking, which_window=self.which_window) + + def set_lighting_on(self, lighting_on, blocking=True): + self.parent_window.set_lighting_on(lighting_on, blocking=blocking, which_window=self.which_window) + + def set_autorecenter(self, autorecenter, blocking=False): + self.parent_window.set_autorecenter(autorecenter, blocking=blocking, which_window=self.which_window) + + def set_background_color(self, background_color, blocking=False): + self.parent_window.set_background_color(background_color, blocking=blocking, which_window=self.which_window) + + def save_snapshot(self, path, blocking=False): + self.parent_window.save_snapshot( + path, blocking=blocking, which_window=self.which_window) + + def get_event(self): + return self.parent_window.get_event() + + def get_keypress(self): + return self.parent_window.get_keypress()['key'] + + def get_mouseclick(self): + return self.parent_window.get_mouseclick() + + def close(self): + self.parent_window.p.terminate() + + background_color = property(fset=set_background_color, doc="Background color, as 3-element numpy array where 0 <= color <= 1.0.") + dynamic_meshes = property(fset=set_dynamic_meshes, doc="List of meshes for dynamic display.") + static_meshes = property(fset=set_static_meshes, doc="List of meshes for static display.") + dynamic_models = property(fset=set_dynamic_models, doc="List of model names and parameters for dynamic display.") + dynamic_lines = property(fset=set_dynamic_lines, doc="List of Lines for dynamic display.") + static_lines = property(fset=set_static_lines, doc="List of Lines for static display.") + titlebar = property(fset=set_titlebar, doc="Titlebar string.") + lighting_on = property(fset=set_lighting_on, doc="Titlebar string.") + + +class MeshViewerSingle: + + def __init__(self, x1_pct, y1_pct, width_pct, height_pct): + assert(width_pct <= 1) + assert(height_pct <= 1) + self.dynamic_meshes = [] + self.static_meshes = [] + self.dynamic_models = [] + self.dynamic_lines = [] + self.static_lines = [] + self.lighting_on = True + self.scape_models = {} + self.x1_pct = x1_pct + self.y1_pct = y1_pct + self.width_pct = width_pct + self.height_pct = height_pct + self.autorecenter = True + + def get_dimensions(self): + d = {} + d['window_width'] = GLUT.glutGet(GLUT.GLUT_WINDOW_WIDTH) + d['window_height'] = GLUT.glutGet(GLUT.GLUT_WINDOW_HEIGHT) + d['subwindow_width'] = self.width_pct * d['window_width'] + d['subwindow_height'] = self.height_pct * d['window_height'] + d['subwindow_origin_x'] = self.x1_pct * d['window_width'] + d['subwindow_origin_y'] = self.y1_pct * d['window_height'] + return d + + def on_draw(self, transform, want_camera=False): + + d = self.get_dimensions() + + GL.glViewport( + int(d['subwindow_origin_x']), + int(d['subwindow_origin_y']), + int(d['subwindow_width']), + int(d['subwindow_height'])) + + GL.glMatrixMode(GL.GL_PROJECTION) + GL.glLoadIdentity() + + fov_degrees = 45. + near = 1.0 + far = 100. 
+ ratio = float(d['subwindow_width']) / float(d['subwindow_height']) + if d['subwindow_width'] < d['subwindow_height']: + xt = np.tan(fov_degrees * np.pi / 180. / 2.0) * near + yt = xt / ratio + GL.glFrustum(-xt, xt, -yt, yt, near, far) + else: + GLU.gluPerspective(fov_degrees, ratio, near, far) + + GL.glMatrixMode(GL.GL_MODELVIEW) + GL.glLoadIdentity() + GL.glLightModeli(GL.GL_LIGHT_MODEL_TWO_SIDE, GL.GL_TRUE) + + GL.glTranslatef(0.0, 0.0, -6.0) +# GL.glTranslatef(0.0,0.0,-3.5) + + GL.glPushMatrix() + GL.glMultMatrixf(transform) + GL.glColor3f(1.0, 0.75, 0.75) + + if self.autorecenter: + camera = self.draw_primitives_recentered(want_camera=want_camera) + else: + if hasattr(self, 'current_center') and hasattr(self, 'current_scalefactor'): + camera = self.draw_primitives(scalefactor=self.current_scalefactor, center=self.current_center) + else: + camera = self.draw_primitives(want_camera=want_camera) + + GL.glPopMatrix() + + if want_camera: + return camera + + def draw_primitives_recentered(self, want_camera=False): + return self.draw_primitives(recenter=True, want_camera=want_camera) + + @staticmethod + def set_shaders(m): + VERTEX_SHADER = GL.shaders.compileShader("""void main() { + gl_Position = gl_ModelViewProjectionMatrix * gl_Vertex; + }""", GL.GL_VERTEX_SHADER) + FRAGMENT_SHADER = GL.shaders.compileShader("""void main() { + gl_FragColor = vec4( 0, 1, 0, 1 ); + }""", GL.GL_FRAGMENT_SHADER) + m.shaders = GL.shaders.compileProgram(VERTEX_SHADER, FRAGMENT_SHADER) + + @staticmethod + def set_texture(m): + texture_data = np.array(m.texture_image, dtype='int8') + m.textureID = GL.glGenTextures(1) + GL.glPixelStorei(GL.GL_UNPACK_ALIGNMENT, 1) + GL.glBindTexture(GL.GL_TEXTURE_2D, m.textureID) + GL.glTexImage2D(GL.GL_TEXTURE_2D, 0, GL.GL_RGB, texture_data.shape[1], texture_data.shape[0], 0, GL.GL_BGR, GL.GL_UNSIGNED_BYTE, texture_data.flatten()) + GL.glHint(GL.GL_GENERATE_MIPMAP_HINT, GL.GL_NICEST) # must be GL_FASTEST, GL.GL_NICEST or GL_DONT_CARE + GL.glGenerateMipmap(GL.GL_TEXTURE_2D) + + @staticmethod + def draw_mesh(m, lighting_on): + + # Supply vertices + GL.glEnableClientState(GL.GL_VERTEX_ARRAY) + m.vbo['v'].bind() + GL.glVertexPointer(3, GL.GL_FLOAT, 0, m.vbo['v']) + m.vbo['v'].unbind() + + # Supply normals + if 'vn' in m.vbo.keys(): + GL.glEnableClientState(GL.GL_NORMAL_ARRAY) + m.vbo['vn'].bind() + GL.glNormalPointer(GL.GL_FLOAT, 0, m.vbo['vn']) + m.vbo['vn'].unbind() + else: + GL.glDisableClientState(GL.GL_NORMAL_ARRAY) + + # Supply colors + if 'vc' in m.vbo.keys(): + GL.glEnableClientState(GL.GL_COLOR_ARRAY) + m.vbo['vc'].bind() + GL.glColorPointer(3, GL.GL_FLOAT, 0, m.vbo['vc']) + m.vbo['vc'].unbind() + else: + GL.glDisableClientState(GL.GL_COLOR_ARRAY) + + if ('vt' in m.vbo.keys()) and hasattr(m, 'textureID'): + GL.glEnable(GL.GL_TEXTURE_2D) + GL.glTexParameterf(GL.GL_TEXTURE_2D, GL.GL_TEXTURE_MAG_FILTER, GL.GL_NEAREST) + GL.glTexParameterf(GL.GL_TEXTURE_2D, GL.GL_TEXTURE_MIN_FILTER, GL.GL_NEAREST) + GL.glTexEnvf(GL.GL_TEXTURE_ENV, GL.GL_TEXTURE_ENV_MODE, GL.GL_MODULATE) + GL.glBindTexture(GL.GL_TEXTURE_2D, m.textureID) + + GL.glEnableClientState(GL.GL_TEXTURE_COORD_ARRAY) + m.vbo['vt'].bind() + GL.glTexCoordPointer(2, GL.GL_FLOAT, 0, m.vbo['vt']) + m.vbo['vt'].unbind() + else: + GL.glDisable(GL.GL_TEXTURE_2D) + GL.glDisableClientState(GL.GL_TEXTURE_COORD_ARRAY) + + # Draw + if len(m.f) > 0: + # ie if it is triangulated + if lighting_on: + GL.glEnable(GL.GL_LIGHTING) + else: + GL.glDisable(GL.GL_LIGHTING) + GL.glDrawElementsui(GL.GL_TRIANGLES, np.arange(m.f.size, 
dtype=np.uint32)) + else: + # not triangulated, so disable lighting + GL.glDisable(GL.GL_LIGHTING) + GL.glPointSize(2) + GL.glDrawElementsui(GL.GL_POINTS, np.arange(len(m.v), dtype=np.uint32)) + if hasattr(m, 'v_to_text'): + + GL.glEnable(GL.GL_TEXTURE_2D) + GL.glTexParameterf(GL.GL_TEXTURE_2D, GL.GL_TEXTURE_MAG_FILTER, GL.GL_LINEAR) + GL.glTexParameterf(GL.GL_TEXTURE_2D, GL.GL_TEXTURE_MIN_FILTER, GL.GL_LINEAR_MIPMAP_LINEAR) + GL.glTexEnvf(GL.GL_TEXTURE_ENV, GL.GL_TEXTURE_ENV_MODE, GL.GL_DECAL) + + bgcolor = np.array(GL.glGetDoublev(GL.GL_COLOR_CLEAR_VALUE)) + fgcolor = 1. - bgcolor + + from .lines import Lines + sc = float(np.max(np.max(m.v, axis=0) - np.min(m.v, axis=0))) / 10. + + cur_mtx = np.linalg.pinv(GL.glGetFloatv(GL.GL_MODELVIEW_MATRIX).T) + xdir = cur_mtx[:3, 0] + ydir = cur_mtx[:3, 1] + + GL.glEnable(GL.GL_LINE_SMOOTH) + GL.glEnable(GL.GL_BLEND) + GL.glBlendFunc(GL.GL_SRC_ALPHA, GL.GL_ONE_MINUS_SRC_ALPHA) + + for vidx, text in m.v_to_text.items(): + pos0 = m.v[vidx].copy() + pos1 = m.v[vidx].copy() + if hasattr(m, 'vn'): + pos1 += m.vn[vidx] * sc + GL.glLineWidth(5.0) + ln = Lines(v=np.vstack((pos0, pos1)), e=np.array([[0, 1]])) + GL.glEnable(GL.GL_LIGHTING) + GL.glColor3f(1. - 0.8, 1. - 0.8, 1. - 1.00) + MeshViewerSingle.draw_lines(ln) + + GL.glDisable(GL.GL_LIGHTING) + + texture_id = get_textureid_with_text(text, bgcolor, fgcolor) + GL.glBindTexture(GL.GL_TEXTURE_2D, texture_id) + + GL.glPushMatrix() + GL.glTranslatef(pos1[0], pos1[1], pos1[2]) + + dx = xdir * .10 + dy = ydir * .10 + if False: + GL.glBegin(GL.GL_QUADS) + + GL.glTexCoord2f(1., 0.) + GL.glVertex3f(*(+dx + dy)) + + GL.glTexCoord2f(1., 1.) + GL.glVertex3f(*(+dx - dy)) + + GL.glTexCoord2f(0., 1.) + GL.glVertex3f(*(-dx - dy)) + + GL.glTexCoord2f(0., 0.) + GL.glVertex3f(*(-dx + dy)) + + # gluSphere(quadratic,0.05,32,32) + GL.glEnd() + else: + GL.glBegin(GL.GL_POLYGON) + + for r in np.arange(0, np.pi * 2., .01): + GL.glTexCoord2f(np.cos(r) / 2. + .5, np.sin(r) / 2. 
+ .5) + GL.glVertex3f(*(dx * np.cos(r) + -dy * np.sin(r))) + + GL.glEnd() + GL.glPopMatrix() + + + @staticmethod + def draw_lines(ls): + GL.glDisableClientState(GL.GL_NORMAL_ARRAY) + GL.glEnableClientState(GL.GL_VERTEX_ARRAY) + GL.glLineWidth(3.0) + allpts = ls.v[ls.e.flatten()].astype(np.float32) + GL.glVertexPointerf(allpts) + if hasattr(ls, 'vc') or hasattr(ls, 'ec'): + GL.glEnableClientState(GL.GL_COLOR_ARRAY) + if hasattr(ls, 'vc'): + GL.glColorPointerf(ls.vc[ls.e.flatten()].astype(np.float32)) + else: + clrs = np.ones((ls.e.shape[0] * 2, 3)) * np.repeat(ls.ec, 2, axis=0) + GL.glColorPointerf(clrs) + else: + GL.glDisableClientState(GL.GL_COLOR_ARRAY) + + GL.glDisable(GL.GL_LIGHTING) + GL.glDrawElementsui(GL.GL_LINES, np.arange(len(allpts), dtype=np.uint32)) + + def draw_primitives(self, + scalefactor=1.0, + center=[0.0, 0.0, 0.0], + recenter=False, + want_camera=False): + + # measure the bounding box of all our primitives, so that we can + # recenter them in our field of view + if recenter: + all_meshes = self.static_meshes + self.dynamic_meshes + all_lines = self.static_lines + self.dynamic_lines + + if (len(all_meshes) + len(all_lines)) == 0: + if want_camera: + return {'modelview_matrix': GL.glGetDoublev(GL.GL_MODELVIEW_MATRIX), + 'projection_matrix': GL.glGetDoublev(GL.GL_PROJECTION_MATRIX), + 'viewport': GL.glGetIntegerv(GL.GL_VIEWPORT) + } + else: + return None + + for m in all_meshes: + m.v = m.v.reshape((-1, 3)) + + all_verts = np.concatenate( + [m.v[m.f.flatten() if len(m.f) > 0 else np.arange(len(m.v))] for m in all_meshes] + + [l.v[l.e.flatten()] for l in all_lines], + axis=0) + + maximum = np.max(all_verts, axis=0) + minimum = np.min(all_verts, axis=0) + center = (maximum + minimum) / 2. + scalefactor = (maximum - minimum) / 4. + scalefactor = np.max(scalefactor) + else: + center = np.array(center) +# for mesh in self.dynamic_meshes : +# if mesh.f : mesh.reset_normals() + all_meshes = self.static_meshes + self.dynamic_meshes + all_lines = self.static_lines + self.dynamic_lines + self.current_center = center + self.current_scalefactor = scalefactor + + GL.glMatrixMode(GL.GL_MODELVIEW) + GL.glPushMatrix() + # uncomment to add a default rotation (useful when automatically snapshoting kinect data + # glRotate(220, 0.0, 1.0, 0.0) + + tf = np.identity(4, 'f') / scalefactor + tf[:3, 3] = -center / scalefactor + tf[3, 3] = 1 + cur_mtx = GL.glGetFloatv(GL.GL_MODELVIEW_MATRIX).T + + GL.glLoadMatrixf(cur_mtx.dot(tf).T) + + if want_camera: + result = {'modelview_matrix': GL.glGetDoublev(GL.GL_MODELVIEW_MATRIX), + 'projection_matrix': GL.glGetDoublev(GL.GL_PROJECTION_MATRIX), + 'viewport': GL.glGetIntegerv(GL.GL_VIEWPORT) + } + else: + result = None + + for m in all_meshes: + if not hasattr(m, 'vbo'): + # Precompute vertex vbo + fidxs = m.f.flatten() if len(m.f) > 0 else np.arange(len(m.v)) + allpts = m.v[fidxs].astype(np.float32).flatten() + vbo = VBO(allpts) + m.vbo = {'v': vbo} + + # Precompute normals vbo + if hasattr(m, 'vn'): + ns = m.vn.astype(np.float32) + ns = ns[m.f.flatten(), :] + m.vbo['vn'] = VBO(ns.flatten()) + elif hasattr(m, 'f') and m.f.size > 0: + ns = TriNormals(m.v, m.f).reshape(-1, 3) + ns = np.tile(ns, (1, 3)).reshape(-1, 3).astype(np.float32) + m.vbo['vn'] = VBO(ns.flatten()) + + # Precompute texture vbo + if hasattr(m, 'ft') and (m.ft.size > 0): + ftidxs = m.ft.flatten() + data = m.vt[ftidxs].astype(np.float32)[:, 0:2] + data[:, 1] = 1.0 - 1.0 * data[:, 1] + m.vbo['vt'] = VBO(data) + + # Precompute color vbo + if hasattr(m, 'vc'): + data = 
m.vc[fidxs].astype(np.float32) + m.vbo['vc'] = VBO(data) + elif hasattr(m, 'fc'): + data = np.tile(m.fc, (1, 3)).reshape(-1, 3).astype(np.float32) + m.vbo['vc'] = VBO(data) + + for e in all_lines: + self.draw_lines(e) + + for m in all_meshes: + if hasattr(m, 'texture_image') and not hasattr(m, 'textureID'): + self.set_texture(m) + self.draw_mesh(m, self.lighting_on) + + GL.glMatrixMode(GL.GL_MODELVIEW) + GL.glPopMatrix() + + return result + + +class MeshViewerLocal: + """Proxy viewer instance for visual inspection of geometric primitives. + + The class forks another python process holding the display. It communicates + the commands with the remote instance seamlessly. + + Write-only attributes: + + :param titlebar: string printed in the window titlebar + :param dynamic_meshes: list of Mesh objects to be displayed + :param static_meshes: list of Mesh objects to be displayed + :param dynamic_lines: list of Lines objects to be displayed + :param static_lines: list of Lines objects to be displayed + + .. note:: + + `static_meshes` is meant for Meshes that are + updated infrequently, and dynamic_meshes is for Meshes + that are updated frequently (same for dynamic_lines vs + static_lines). They may be treated differently for + performance reasons. + + """ + + managed = {} + + def __new__( + cls, + titlebar=MESH_VIEWER_DEFAULT_TITLE, + uid=None, + host=ZMQ_HOST, + port=None, + shape=MESH_VIEWER_DEFAULT_SHAPE, + keepalive=False, + window_width=MESH_VIEWER_DEFAULT_WIDTH, + window_height=MESH_VIEWER_DEFAULT_HEIGHT + ): + + assert(uid is None or isinstance(uid, str)) + + if uid == 'stack': + uid = ''.join(traceback.format_list(traceback.extract_stack())) + if uid and uid in MeshViewer.managed.keys(): + return MeshViewer.managed[uid] + + viewer = super(MeshViewerLocal, cls).__new__(cls) + + viewer.remote_host = host + viewer.remote_port = port + + viewer.client = zmq.Context.instance().socket(zmq.PUSH) + viewer.client.linger = 0 + + if viewer.remote_port: + addr = "{}://{}:{}".format( + ZMQ_TRANSPORT, + viewer.remote_host, + viewer.remote_port) + viewer.client.connect(addr) + + # XXX: Proper shape querying over the network is not + # possible as of now. This should be tackled during + # refactoring in the future. 
+ + # viewer.shape = viewer.get_window_shape() + viewer.shape = (1, 1) + + return viewer + + with open(os.devnull) as dev_null, \ + tempfile.TemporaryFile() as err: + + viewer.p = _run_self([titlebar, str(shape[0]), str(shape[1]), str(window_width), str(window_height)], + stdin=dev_null, + stdout=subprocess.PIPE, + stderr=err) + + line = viewer.p.stdout.readline().decode() + viewer.p.stdout.close() + current_port = re.match('(.*?)', line) + if not current_port: + raise Exception("MeshViewer remote appears to have failed to launch") + current_port = int(current_port.group(1)) + viewer.client.connect('{}://{}:{}'.format(ZMQ_TRANSPORT, ZMQ_HOST, current_port)) + + logging.info( + "started remote viewer on port {}".format(current_port)) + + if uid: + MeshViewerLocal.managed[uid] = viewer + viewer.shape = shape + viewer.keepalive = keepalive + return viewer + + def get_subwindows(self): + return [[MeshSubwindow(parent_window=self, which_window=(r, c)) for c in range(self.shape[1])] for r in range(self.shape[0])] + + @staticmethod + def _sanitize_meshes(list_of_meshes): + lm = [] + + # have to copy the meshes for now, because some contain CPython members, + # before pushing them on the queue + for m in list_of_meshes: + if hasattr(m, 'fc'): + lm.append(Mesh(v=m.v, f=m.f, fc=m.fc)) + elif hasattr(m, 'vc'): + lm.append(Mesh(v=m.v, f=m.f, vc=m.vc)) + else: + lm.append(Mesh(v=m.v, f=m.f if hasattr(m, 'f') else [])) + + if hasattr(m, 'vn'): + lm[-1].vn = m.vn + if hasattr(m, 'fn'): + lm[-1].fn = m.fn + + if hasattr(m, 'v_to_text'): + lm[-1].v_to_text = m.v_to_text + if hasattr(m, 'texture_filepath') and hasattr(m, 'vt') and hasattr(m, 'ft'): + lm[-1].texture_filepath = m.texture_filepath + lm[-1].vt = m.vt + lm[-1].ft = m.ft + + return lm + + def _send_pyobj(self, label, obj, blocking, which_window): + logging.debug("sending a request:") + logging.debug("\tlabel = {!r}".format(label)) + logging.debug("\tobj = {!r}".format(obj)) + logging.debug("\tblocking = {!r}".format(blocking)) + logging.debug("\twhich_window = {!r}".format(which_window)) + + if blocking: + context = zmq.Context.instance() + server = context.socket(zmq.PULL) + server.linger = 0 + port = server.bind_to_random_port( + "{}://{}".format(ZMQ_TRANSPORT, ZMQ_HOST), + min_port=ZMQ_PORT_MIN, + max_port=ZMQ_PORT_MAX, + max_tries=100000) + + # sending with blocking' + self.client.send_pyobj({ + 'label': label, + 'obj': obj, + 'port': port, + 'which_window': which_window + }) + + task_completion_time = server.recv_pyobj() + # task completion time was %.2fs in other process' % (task_completion_time,) + server.close() + else: + # sending nonblocking + res = self.client.send_pyobj({ + 'label': label, + 'obj': obj, + 'which_window': which_window + }) + + def _recv_pyobj(self, label, port=None): + context = zmq.Context.instance() + server = context.socket(zmq.PULL) + server.linger = 0 + + if not port: + port = server.bind_to_random_port( + "{}://{}".format(ZMQ_TRANSPORT, ZMQ_HOST), + min_port=ZMQ_PORT_MIN, + max_port=ZMQ_PORT_MAX, + max_tries=100000) + + self._send_pyobj(label, port, blocking=True, which_window=(0, 0)) + result = server.recv_pyobj() + server.close() + + return result + + def set_dynamic_meshes(self, list_of_meshes, blocking=False, which_window=(0, 0)): + self._send_pyobj('dynamic_meshes', self._sanitize_meshes(list_of_meshes), blocking, which_window) + + def set_static_meshes(self, list_of_meshes, blocking=False, which_window=(0, 0)): + self._send_pyobj('static_meshes', self._sanitize_meshes(list_of_meshes), blocking, 
which_window) + + # list_of_model_names_and_parameters should be of form [{'name': scape_model_name, 'parameters': scape_model_parameters}] + # here scape_model_name is the filepath of the scape model. + def set_dynamic_models(self, list_of_model_names_and_parameters, blocking=False, which_window=(0, 0)): + self._send_pyobj('dynamic_models', list_of_model_names_and_parameters, blocking, which_window) + + def set_dynamic_lines(self, list_of_lines, blocking=False, which_window=(0, 0)): + self._send_pyobj('dynamic_lines', list_of_lines, blocking, which_window) + + def set_static_lines(self, list_of_lines, blocking=False, which_window=(0, 0)): + self._send_pyobj('static_lines', list_of_lines, blocking, which_window) + + def set_titlebar(self, titlebar, blocking=False, which_window=(0, 0)): + self._send_pyobj('titlebar', titlebar, blocking, which_window) + + def set_lighting_on(self, lighting_on, blocking=False, which_window=(0, 0)): + self._send_pyobj('lighting_on', lighting_on, blocking, which_window) + + def set_autorecenter(self, autorecenter, blocking=False, which_window=(0, 0)): + self._send_pyobj('autorecenter', autorecenter, blocking, which_window) + + def set_background_color(self, background_color, blocking=False, which_window=(0, 0)): + assert(isinstance(background_color, np.ndarray)) + assert(background_color.size == 3) + self._send_pyobj('background_color', background_color.flatten(), blocking, which_window) + + def get_keypress(self): + return self._recv_pyobj('get_keypress') + + def get_mouseclick(self): + """Returns a mouse click event. + + .. note:: + + the call is blocking the caller until an event is received + """ + return self._recv_pyobj('get_mouseclick') + + def get_event(self): + return self._recv_pyobj('get_event') + + def get_window_shape(self): + response = self._recv_pyobj("get_window_shape") + return response["shape"] + + background_color = property(fset=set_background_color, + doc="Background color, as 3-element numpy array where 0 <= color <= 1.0.") + + dynamic_meshes = property(fset=set_dynamic_meshes, + doc="List of meshes for dynamic display.") + static_meshes = property(fset=set_static_meshes, + doc="List of meshes for static display.") + dynamic_models = property(fset=set_dynamic_models, + doc="List of model names and parameters for dynamic display.") + + dynamic_lines = property(fset=set_dynamic_lines, + doc="List of Lines for dynamic display.") + static_lines = property(fset=set_static_lines, + doc="List of Lines for static display.") + + titlebar = property(fset=set_titlebar, + doc="Titlebar string.") + + def save_snapshot(self, path, blocking=False, which_window=(0, 0), wait_time=1): + """Saves a snapshot of the current window into the specified file + + :param path: filename to which the current window content will be saved + :param wait_time: waiting time to save snapshot. Increase it if the image is incomplete + """ + print('Saving snapshot to %s, please wait...' 
% path) + self._send_pyobj('save_snapshot', path, blocking, which_window) + time.sleep(wait_time) + + def __del__(self): + if hasattr(self, "p") and not self.keepalive: + self.p.terminate() + + +class MeshViewerRemote: + def __init__( + self, + titlebar=MESH_VIEWER_DEFAULT_TITLE, + subwins_vert=MESH_VIEWER_DEFAULT_SHAPE[1], + subwins_horz=MESH_VIEWER_DEFAULT_SHAPE[0], + width=MESH_VIEWER_DEFAULT_WIDTH, + height=MESH_VIEWER_DEFAULT_HEIGHT, + port=None + ): + + context = zmq.Context.instance() + self.server = context.socket(zmq.PULL) + self.server.linger = 0 + + if not port: + self.port = self.server.bind_to_random_port( + "{}://{}".format(ZMQ_TRANSPORT, ZMQ_HOST), + min_port=ZMQ_PORT_MIN, + max_port=ZMQ_PORT_MAX, + max_tries=100000) + else: + self.server.bind( + "{}://{}:{}".format(ZMQ_TRANSPORT, ZMQ_HOST, port)) + self.port = port + + logging.debug( + "listening for incoming messages on port {}" + .format(self.port)) + + # Print out our port so that our client can connect to us with it. Flush stdout immediately; otherwise + # our client could wait forever. + print('%d\n' % (self.port,)) + sys.stdout.flush() + + self.arcball = ArcBallT(width, height) + self.transform = Matrix4fT() + self.lastrot = Matrix3fT() + self.thisrot = Matrix3fT() + self.isdragging = False + self.need_redraw = True + + self.shape = (subwins_vert, subwins_horz) + self.mesh_viewers = [ + [ + MeshViewerSingle( + float(c) / (subwins_horz), + float(r) / (subwins_vert), + 1. / subwins_horz, + 1. / subwins_vert) + for c in range(subwins_horz) + ] + for r in range(subwins_vert) + ] + + self.tm_for_fps = 0. + self.titlebar = titlebar + self.activate(width, height) + + def snapshot(self, path, wait_time=0.1): + """ + Takes a snapshot of the meshviewer window and saves it to disc. + + :param path: path to save the snapshot at. + :param wait_time: waiting time to save snapshot. Increase it if the image is incomplete + + .. note:: Requires the Pillow package to be installed. + """ + self.on_draw() + time.sleep(wait_time) + + width = GLUT.glutGet(GLUT.GLUT_WINDOW_WIDTH) + height = GLUT.glutGet(GLUT.GLUT_WINDOW_HEIGHT) + + data = (GLU.GLubyte * (3 * width * height))(0) + GL.glReadPixels(0, 0, width, height, GL.GL_RGB, GL.GL_UNSIGNED_BYTE, data) + image = Image.frombytes(mode="RGB", size=(width, height), data=data) + image = image.transpose(Image.FLIP_TOP_BOTTOM) + + # Save image to disk + image.save(path) + + def activate(self, width, height): + GLUT.glutInit(['mesh_viewer']) + GLUT.glutInitDisplayMode(GLUT.GLUT_RGBA | GLUT.GLUT_DOUBLE | GLUT.GLUT_ALPHA | GLUT.GLUT_DEPTH) + GLUT.glutInitWindowSize(width, height) + GLUT.glutInitWindowPosition(0, 0) + self.root_window_id = GLUT.glutCreateWindow(self.titlebar) + + GLUT.glutTimerFunc(100, self.checkQueue, 0) + GLUT.glutReshapeFunc(self.on_resize_window) + + GLUT.glutKeyboardFunc(self.on_keypress) + GLUT.glutMouseFunc(self.on_click) + GLUT.glutMotionFunc(self.on_drag) + GLUT.glutDisplayFunc(self.on_draw) + + self.init_opengl() + + GLUT.glutMainLoop() # won't return until process is killed + + def on_drag(self, cursor_x, cursor_y): + """ Mouse cursor is moving + Glut calls this function (when mouse button is down) + and pases the mouse cursor postion in window coords as the mouse moves. 
+ """ + from .geometry.rodrigues import rodrigues + if (self.isdragging): + mouse_pt = Point2fT(cursor_x, cursor_y) + ThisQuat = self.arcball.drag(mouse_pt) # // Update End Vector And Get Rotation As Quaternion + self.thisrot = Matrix3fSetRotationFromQuat4f(ThisQuat) # // Convert Quaternion Into Matrix3fT + # Use correct Linear Algebra matrix multiplication C = A * B + self.thisrot = Matrix3fMulMatrix3f(self.lastrot, self.thisrot) # // Accumulate Last Rotation Into This One + + # make sure it is a rotation + self.thisrot = rodrigues(rodrigues(self.thisrot)[0])[0] + self.transform = Matrix4fSetRotationFromMatrix3f(self.transform, self.thisrot) # // Set Our Final Transform's Rotation From This One + GLUT.glutPostRedisplay() + return + + # The function called whenever a key is pressed. Note the use of Python tuples to pass in: (key, x, y) + def on_keypress(self, *args): + key = args[0] + if hasattr(self, 'event_port'): + self.keypress_port = self.event_port + del self.event_port + if hasattr(self, 'keypress_port'): + client = zmq.Context.instance().socket(zmq.PUSH) + client.connect('{}://{}:{}'.format(ZMQ_TRANSPORT, ZMQ_HOST, self.keypress_port)) + client.send_pyobj({'event_type': 'keyboard', 'key': key}) + del self.keypress_port + + def on_click(self, button, button_state, cursor_x, cursor_y): + """ Mouse button clicked. + Glut calls this function when a mouse button is + clicked or released. + """ + + self.isdragging = False + + if (button == GLUT.GLUT_LEFT_BUTTON and button_state == GLUT.GLUT_UP): + # Left button released + self.lastrot = copy.copy(self.thisrot) # Set Last Static Rotation To Last Dynamic One + + elif (button == GLUT.GLUT_LEFT_BUTTON and button_state == GLUT.GLUT_DOWN): + # Left button clicked down + self.lastrot = copy.copy(self.thisrot) # Set Last Static Rotation To Last Dynamic One + self.isdragging = True # // Prepare For Dragging + mouse_pt = Point2fT(cursor_x, cursor_y) + self.arcball.click(mouse_pt) # Update Start Vector And Prepare For Dragging + + elif (button == GLUT.GLUT_RIGHT_BUTTON and button_state == GLUT.GLUT_DOWN): + # If a mouse click location was requested, return it to caller + if hasattr(self, 'event_port'): + self.mouseclick_port = self.event_port + del self.event_port + if hasattr(self, 'mouseclick_port'): + self.send_mouseclick_to_caller(cursor_x, cursor_y) + + elif (button == GLUT.GLUT_MIDDLE_BUTTON and button_state == GLUT.GLUT_DOWN): + # If a mouse click location was requested, return it to caller + if hasattr(self, 'event_port'): + self.mouseclick_port = self.event_port + del self.event_port + if hasattr(self, 'mouseclick_port'): + self.send_mouseclick_to_caller(cursor_x, cursor_y, button='middle') + + GLUT.glutPostRedisplay() + + def send_mouseclick_to_caller(self, cursor_x, cursor_y, button='right'): + + client = zmq.Context.instance().socket(zmq.PUSH) + client.connect('{}://{}:{}'.format(ZMQ_TRANSPORT, ZMQ_HOST, self.mouseclick_port)) + cameras = self.on_draw(want_cameras=True) + + window_height = GLUT.glutGet(GLUT.GLUT_WINDOW_HEIGHT) + depth_value = GL.glReadPixels(cursor_x, window_height - cursor_y, 1, 1, GL.GL_DEPTH_COMPONENT, GL.GL_FLOAT) + + pyobj = { + 'event_type': 'mouse_click_%sbutton' % button, + 'u': None, 'v': None, + 'x': None, 'y': None, 'z': None, + 'subwindow_row': None, + 'subwindow_col': None + } + + for subwin_row, camera_list in enumerate(cameras): + for subwin_col, camera in enumerate(camera_list): + + # test for out-of-bounds + if cursor_x < camera['viewport'][0]: + continue + if cursor_x > (camera['viewport'][0] + 
camera['viewport'][2]): + continue + if window_height - cursor_y < camera['viewport'][1]: + continue + if window_height - cursor_y > (camera['viewport'][1] + camera['viewport'][3]): + continue + + xx, yy, zz = GLU.gluUnProject( + cursor_x, window_height - cursor_y, depth_value, + camera['modelview_matrix'], + camera['projection_matrix'], + camera['viewport']) + + pyobj = { + 'event_type': 'mouse_click_%sbutton' % button, + 'u': cursor_x - camera['viewport'][0], 'v': window_height - cursor_y - camera['viewport'][1], + 'x': xx, 'y': yy, 'z': zz, + 'which_subwindow': (subwin_row, subwin_col) + } + + client.send_pyobj(pyobj) + del self.mouseclick_port + + def on_draw(self, want_cameras=False): + # sys.stderr.write('fps: %.2e\n' % (1. / (time.time() - self.tm_for_fps))) + self.tm_for_fps = time.time() + GL.glClear(GL.GL_COLOR_BUFFER_BIT | GL.GL_DEPTH_BUFFER_BIT) + cameras = [] + for mvl in self.mesh_viewers: + cameras.append([]) + for mv in mvl: + cameras[-1].append(mv.on_draw(self.transform, want_cameras)) + GL.glFlush() # Flush The GL Rendering Pipeline + GLUT.glutSwapBuffers() + self.need_redraw = False + if want_cameras: + return cameras + + def on_resize_window(self, Width, Height): + """Reshape The Window When It's Moved Or Resized""" + self.arcball.setBounds(Width, Height) # //*NEW* Update mouse bounds for arcball + return + + def send_window_shape(self, port): + client = zmq.Context.instance().socket(zmq.PUSH) + client.connect('{}://{}:{}'.format(ZMQ_TRANSPORT, ZMQ_HOST, port)) + client.send_pyobj({ + 'event_type': 'window_shape', + 'shape': self.shape + }) + + def handle_request(self, request): + label = request['label'] + obj = request['obj'] + w = request['which_window'] + mv = self.mesh_viewers[w[0]][w[1]] + + logging.debug("received a request: {}".format(request)) + + # Handle each type of request. + # Some requests require a redraw, and + # some don't. 
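For orientation, the request dicts dispatched below are the ones pushed by MeshViewerLocal._send_pyobj earlier in this file; a sketch of one such dict (the values are placeholders, not taken from a real session):

request = {
    'label': 'dynamic_meshes',  # which attribute of the target sub-window to set
    'obj': [],                  # payload, e.g. a list of sanitized Mesh objects
    'which_window': (0, 0),     # (row, col) of the sub-window
    'port': 51234,              # only present for blocking calls; the task completion time is sent back here
}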
+ if label == 'dynamic_meshes': + mv.dynamic_meshes = obj + self.need_redraw = True + elif label == 'dynamic_models': + mv.dynamic_models = obj + self.need_redraw = True + elif label == 'static_meshes': + mv.static_meshes = obj + self.need_redraw = True + elif label == 'dynamic_lines': + mv.dynamic_lines = obj + self.need_redraw = True + elif label == 'static_lines': + mv.static_lines = obj + self.need_redraw = True + elif label == 'autorecenter': + mv.autorecenter = obj + self.need_redraw = True + elif label == 'titlebar': + assert(isinstance(obj, str)) + self.titlebar = obj + GLUT.glutSetWindowTitle(obj) + elif label == 'lighting_on': + mv.lighting_on = obj + self.need_redraw = True + elif label == 'background_color': + GL.glClearColor(obj[0], obj[1], obj[2], 1.0) + self.need_redraw = True + elif label == 'save_snapshot': # redraws for itself + assert(isinstance(obj, str)) + self.snapshot(obj) + elif label == 'get_keypress': + self.keypress_port = obj + elif label == 'get_mouseclick': + self.mouseclick_port = obj + elif label == 'get_event': + self.event_port = obj + elif label == 'get_window_shape': + self.send_window_shape(obj) + else: + return False # can't handle this request string + + return True # handled the request string + + def checkQueue(self, unused_timer_id): + GLUT.glutTimerFunc(20, self.checkQueue, 0) + + try: + request = self.server.recv_pyobj(zmq.NOBLOCK) + except zmq.ZMQError as e: + if e.errno != zmq.EAGAIN: + raise # something wrong besides empty queue + return # empty queue, no problem + + if not request: + return + + while (request): + task_completion_time = time.time() + if not self.handle_request(request): + raise Exception('Unknown command string: %s' % (request['label'])) + task_completion_time = time.time() - task_completion_time + + if 'port' in request: # caller wants confirmation + port = request['port'] + client = zmq.Context.instance().socket(zmq.PUSH) + client.connect('{}://{}:{}'.format(ZMQ_TRANSPORT, ZMQ_HOST, port)) + client.send_pyobj(task_completion_time) + try: + request = self.server.recv_pyobj(zmq.NOBLOCK) + except zmq.ZMQError as e: + if e.errno != zmq.EAGAIN: + raise + request = None + + if self.need_redraw: + GLUT.glutPostRedisplay() + + def init_opengl(self): + """A general OpenGL initialization function. Sets all of the initial parameters. + + We call this right after our OpenGL window is created. 
+ """ + + GL.glClearColor(0.0, 0.0, 0.0, 1.0) # This Will Clear The Background Color To Black + GL.glClearDepth(1.0) # Enables Clearing Of The Depth Buffer + GL.glDepthFunc(GL.GL_LEQUAL) # The Type Of Depth Test To Do + GL.glEnable(GL.GL_DEPTH_TEST) # Enables Depth Testing + GL.glShadeModel(GL.GL_SMOOTH) + GL.glHint(GL.GL_PERSPECTIVE_CORRECTION_HINT, GL.GL_NICEST) # Really Nice Perspective Calculations + + GL.glEnable(GL.GL_LIGHT0) + GL.glEnable(GL.GL_LIGHTING) + + GL.glEnable(GL.GL_COLOR_MATERIAL) + GL.glEnable(GL.GL_NORMALIZE) # important since we rescale the modelview matrix + + return True + + +if __name__ == '__main__': + if len(sys.argv) == 2 and sys.argv[1] == 'TEST_FOR_OPENGL': + _test_for_opengl() + + elif len(sys.argv) > 2: + m = MeshViewerRemote(titlebar=sys.argv[1], + subwins_vert=int(sys.argv[2]), + subwins_horz=int(sys.argv[3]), + width=int(sys.argv[4]), + height=int(sys.argv[5])) + else: + print("#" * 10) + print('Usage:') + print("python -m %s.%s arguments" % (__package__, os.path.splitext(os.path.basename(__file__))[0])) diff --git a/mesh-master/mesh/processing.py b/mesh-master/mesh/processing.py new file mode 100644 index 0000000000000000000000000000000000000000..c3deeed7ab62fe8b2bb36d8d2407a8dd3478d34d --- /dev/null +++ b/mesh-master/mesh/processing.py @@ -0,0 +1,186 @@ +#!/usr/bin/env python +# encoding: utf-8 + +# Copyright (c) 2013 Max Planck Society. All rights reserved. +# Created by Matthew Loper on 2013-02-20. + + +""" +Mesh processing backend +======================= + +""" + +import numpy as np + + +def reset_normals(self, face_to_verts_sparse_matrix=None, reset_face_normals=False): + self.vn = self.estimate_vertex_normals(face_to_verts_sparse_matrix=None) + if reset_face_normals: + self.fn = self.f.copy() + return self + + +def reset_face_normals(self): + if not hasattr(self, 'vn'): + self.reset_normals() + self.fn = self.f + return self + + +def uniquified_mesh(self): + """This function returns a copy of the mesh in which vertices are copied such that + each vertex appears in only one face, and hence has only one texture""" + from mesh import Mesh + new_mesh = Mesh(v=self.v[self.f.flatten()], f=np.array(range(len(self.f.flatten()))).reshape(-1, 3)) + + if not hasattr(self, 'vn'): + self.reset_normals() + new_mesh.vn = self.vn[self.f.flatten()] + + if hasattr(self, 'vt'): + new_mesh.vt = self.vt[self.ft.flatten()] + new_mesh.ft = new_mesh.f.copy() + return new_mesh + + +def keep_vertices(self, keep_list): + trans = dict((v, i) for i, v in enumerate(keep_list)) + trans_f = np.array([trans[v] if v in trans else -1 for row in self.f for v in row], dtype=np.uint32).reshape(-1, 3) + if hasattr(self, 'vn') and self.vn.shape[0] == self.vn.shape[0]: + self.vn = self.vn.reshape(-1, 3)[keep_list] + if hasattr(self, 'vc') and self.vc.shape[0] == self.v.shape[0]: + self.vc = self.vc.reshape(-1, 3)[keep_list] + if hasattr(self, 'landm_raw_xyz'): + self.recompute_landmark_indices() + + self.v = self.v.reshape(-1, 3)[keep_list] + self.f = trans_f[(trans_f != np.uint32(-1)).all(axis=1)] + return self + + +def point_cloud(self): + from .mesh import Mesh + return Mesh(v=self.v, f=[], vc=self.vc) if hasattr(self, 'vc') else Mesh(v=self.v, f=[]) + + +def remove_faces(self, face_indices_to_remove): + + def arr_replace(arr_in, lookup_dict): + arr_out = arr_in.copy() + for k, v in lookup_dict.iteritems(): + arr_out[arr_in == k] = v + return arr_out + + f = np.delete(self.f, face_indices_to_remove, 0) + v2keep = np.unique(f) + self.v = self.v[v2keep] + self.f = arr_replace(f, dict((v, 
i) for i, v in enumerate(v2keep))) + + if hasattr(self, 'fc'): + self.fc = np.delete(self.fc, face_indices_to_remove, 0) + if hasattr(self, 'vn') and self.vn.shape[0] == self.vn.shape[0]: + self.vn = self.vn.reshape(-1, 3)[v2keep] + if hasattr(self, 'vc') and self.vc.shape[0] == self.v.shape[0]: + self.vc = self.vc.reshape(-1, 3)[v2keep] + if hasattr(self, 'landm_raw_xyz'): + self.recompute_landmark_indices() + + if hasattr(self, 'ft'): + ft = np.delete(self.ft, face_indices_to_remove, 0) + vt2keep = np.unique(ft) + self.vt = self.vt[vt2keep] + self.ft = arr_replace(ft, dict((v, i) for i, v in enumerate(vt2keep))) + + return self + + +def flip_faces(self): + self.f = self.f.copy() + for i in range(len(self.f)): + self.f[i] = self.f[i][::-1] + if hasattr(self, 'ft'): + for i in range(len(self.f)): + self.ft[i] = self.ft[i][::-1] + return self + + +def scale_vertices(self, scale_factor): + self.v *= scale_factor + return self + + +def rotate_vertices(self, rotation_matrix): + import cv2 + rotation_matrix = np.matrix(cv2.Rodrigues(np.array(rotation_matrix))[0] if (np.array(rotation_matrix).shape != (3, 3)) else rotation_matrix) + self.v = np.array(self.v * rotation_matrix.T) + return self + + +def translate_vertices(self, translation): + self.v += translation + return self + + +def subdivide_triangles(self): + new_faces = [] + new_vertices = self.v.copy() + for face in self.f: + face_vertices = np.array([self.v[face[0], :], self.v[face[1], :], self.v[face[2], :]]) + new_vertex = np.mean(face_vertices, axis=0) + new_vertices = np.vstack([new_vertices, new_vertex]) + new_vertex_index = len(new_vertices) - 1 + if len(new_faces): + new_faces = np.vstack([new_faces, [face[0], face[1], new_vertex_index], [face[1], face[2], new_vertex_index], [face[2], face[0], new_vertex_index]]) + else: + new_faces = np.array([[face[0], face[1], new_vertex_index], [face[1], face[2], new_vertex_index], [face[2], face[0], new_vertex_index]]) + self.v = new_vertices + self.f = new_faces + + if hasattr(self, 'vt'): + new_ft = [] + new_texture_coordinates = self.vt.copy() + for face_texture in self.ft: + face_texture_coordinates = np.array([self.vt[face_texture[0], :], self.vt[face_texture[1], :], self.vt[face_texture[2], :]]) + new_texture_coordinate = np.mean(face_texture_coordinates, axis=0) + new_texture_coordinates = np.vstack([new_texture_coordinates, new_texture_coordinate]) + new_texture_index = len(new_texture_coordinates) - 1 + if len(new_ft): + new_ft = np.vstack([new_ft, [face_texture[0], face_texture[1], new_texture_index], [face_texture[1], face_texture[2], new_texture_index], [face_texture[2], face_texture[0], new_texture_index]]) + else: + new_ft = np.array([[face_texture[0], face_texture[1], new_texture_index], [face_texture[1], face_texture[2], new_texture_index], [face_texture[2], face_texture[0], new_texture_index]]) + self.vt = new_texture_coordinates + self.ft = new_ft + return self + + +def concatenate_mesh(self, mesh): + if len(self.v) == 0: + self.f = mesh.f.copy() + self.v = mesh.v.copy() + self.vc = mesh.vc.copy() if hasattr(mesh, 'vc') else None + elif len(mesh.v): + self.f = np.concatenate([self.f, mesh.f.copy() + len(self.v)]) + self.v = np.concatenate([self.v, mesh.v]) + self.vc = np.concatenate([self.vc, mesh.vc]) if (hasattr(mesh, 'vc') and hasattr(self, 'vc')) else None + return self + + +# new_ordering specifies the new index of each vertex. If new_ordering[i] = j, +# vertex i should now be the j^th vertex. As such, each entry in new_ordering should be unique. 
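A small worked example of that convention, with toy arrays (not part of the library):

import numpy as np

v = np.array([[0., 0., 0.], [1., 0., 0.], [0., 1., 0.]])
new_ordering = [2, 0, 1]                   # vertex 0 -> slot 2, vertex 1 -> slot 0, vertex 2 -> slot 1
inverse_ordering = np.zeros(3, dtype=int)
for i, j in enumerate(new_ordering):
    inverse_ordering[j] = i                # inverse_ordering == [1, 2, 0]
v_new = v[inverse_ordering]                # row 0 is old vertex 1, row 1 is old vertex 2, row 2 is old vertex 0
face = np.array([0, 1, 2])
face_new = np.array([new_ordering[k] for k in face])   # [2, 0, 1]: old indices remapped to new slots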
+def reorder_vertices(self, new_ordering, new_normal_ordering=None):
+    if new_normal_ordering is None:
+        new_normal_ordering = new_ordering
+    inverse_ordering = np.zeros(len(new_ordering), dtype=int)
+    for i, j in enumerate(new_ordering):
+        inverse_ordering[j] = i
+    inverse_normal_ordering = np.zeros(len(new_normal_ordering), dtype=int)
+    for i, j in enumerate(new_normal_ordering):
+        inverse_normal_ordering[j] = i
+    self.v = self.v[inverse_ordering]
+    if hasattr(self, 'vn'):
+        self.vn = self.vn[inverse_normal_ordering]
+    for i in range(len(self.f)):
+        self.f[i] = np.array([new_ordering[vertex_index] for vertex_index in self.f[i]])
+        if hasattr(self, 'fn'):
+            self.fn[i] = np.array([new_normal_ordering[normal_index] for normal_index in self.fn[i]])
diff --git a/mesh-master/mesh/ressources/Arial.ttf b/mesh-master/mesh/ressources/Arial.ttf
new file mode 100644
index 0000000000000000000000000000000000000000..ab68fb197d4479b3b6dec6e85bd5cbaf433a87c5
Binary files /dev/null and b/mesh-master/mesh/ressources/Arial.ttf differ
diff --git a/mesh-master/mesh/search.py b/mesh-master/mesh/search.py
new file mode 100644
index 0000000000000000000000000000000000000000..b90bb43e9feb8622ca5fef1c0be500d2d5eed463
--- /dev/null
+++ b/mesh-master/mesh/search.py
@@ -0,0 +1,100 @@
+#!/usr/bin/env python
+# encoding: utf-8
+
+# Created by Matthew Loper on 2013-02-20.
+# Copyright (c) 2013 MPI. All rights reserved.
+
+"""
+Searching and lookup of geometric entities
+==========================================
+
+"""
+
+
+import numpy as np
+
+__all__ = ['AabbTree', 'AabbNormalsTree', 'ClosestPointTree', 'CGALClosestPointTree']
+
+
+class AabbTree(object):
+    """Encapsulates an AABB (Axis Aligned Bounding Box) Tree"""
+    def __init__(self, m):
+        from . import spatialsearch
+        # note: aabbtree_compute may return NULL on failure
+        self.cpp_handle = spatialsearch.aabbtree_compute(m.v.astype(np.float64).copy(order='C'), m.f.astype(np.uint32).copy(order='C'))
+
+    def nearest(self, v_samples, nearest_part=False):
+        "nearest_part tells you whether the closest point in triangle abc is in the interior (0), on an edge (ab:1,bc:2,ca:3), or a vertex (a:4,b:5,c:6)"
+        from . import spatialsearch
+        f_idxs, f_part, v = spatialsearch.aabbtree_nearest(self.cpp_handle, np.array(v_samples, dtype=np.float64, order='C'))
+        return (f_idxs, f_part, v) if nearest_part else (f_idxs, v)
+
+    def nearest_alongnormal(self, points, normals):
+        from . import spatialsearch
+        distances, f_idxs, v = spatialsearch.aabbtree_nearest_alongnormal(self.cpp_handle,
+                                                                          points.astype(np.float64),
+                                                                          normals.astype(np.float64))
+        return (distances, f_idxs, v)
+
+    def intersections_indices(self, q_v, q_f):
+        '''
+        Given a set of query vertices and faces, computes which of the query faces intersect the mesh.
+        A list with the indices into q_f is returned.
+        @param q_v The query vertices (array of 3xN float values)
+        @param q_f The query faces (array 3xF integer values)
+        '''
+        from . import spatialsearch
+        return spatialsearch.aabbtree_intersections_indices(self.cpp_handle,
+                                                            q_v.astype(np.float64),
+                                                            q_f.astype(np.uint32))
+
+
+class ClosestPointTree(object):
+    """Provides nearest neighbor search for a cloud of vertices (i.e.
triangles are not used)""" + def __init__(self, m): + from scipy.spatial import KDTree + self.v = m.v + self.kdtree = KDTree(self.v) + + def nearest(self, v_samples): + (distances, indices) = zip(*[self.kdtree.query(v) for v in v_samples]) + return (indices, distances) + + def nearest_vertices(self, v_samples): + (distances, indices) = zip(*[self.kdtree.query(v) for v in v_samples]) + return self.v[indices] + + +class CGALClosestPointTree(object): + """Encapsulates an AABB (Axis Aligned Bounding Box) Tree """ + def __init__(self, m): + from . import spatialsearch + self.v = m.v + n = m.v.shape[0] + faces = np.vstack([np.array(range(n)), np.array(range(n)) + n, np.array(range(n)) + 2 * n]).T + eps = 0.000000000001 + self.cpp_handle = spatialsearch.aabbtree_compute(np.vstack([m.v + eps * np.array([1.0, 0.0, 0.0]), m.v + eps * np.array([0.0, 1.0, 0.0]), m.v - eps * np.array([1.0, 1.0, 0.0])]).astype(np.float64).copy(order='C'), faces.astype(np.uint32).copy(order='C')) + + def nearest(self, v_samples): + from . import spatialsearch + f_idxs, f_part, v = spatialsearch.aabbtree_nearest(self.cpp_handle, np.array(v_samples, dtype=np.float64, order='C')) + return (f_idxs.flatten(), (np.sum(((self.v[f_idxs.flatten()] - v_samples) ** 2.0), axis=1) ** 0.5).flatten()) + + def nearest_vertices(self, v_samples): + from . import spatialsearch + f_idxs, f_part, v = spatialsearch.aabbtree_nearest(self.cpp_handle, np.array(v_samples, dtype=np.float64, order='C')) + return self.v[f_idxs.flatten()] + + +class AabbNormalsTree(object): + def __init__(self, m): + # the weight of the normals cosine is proportional to the std of the vertices + # the best point can be translated up to 2*eps because of the normals + from . import aabb_normals + eps = 0.1 # np.std(m.v)#0 + self.tree_handle = aabb_normals.aabbtree_n_compute(m.v, m.f.astype(np.uint32).copy(), eps) + + def nearest(self, v_samples, n_samples): + from . import aabb_normals + closest_tri, closest_p = aabb_normals.aabbtree_n_nearest(self.tree_handle, v_samples, n_samples) + return (closest_tri, closest_p) diff --git a/mesh-master/mesh/serialization/__init__.py b/mesh-master/mesh/serialization/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..52544ca0073c7ac8e827b343e4911fd197faf38b --- /dev/null +++ b/mesh-master/mesh/serialization/__init__.py @@ -0,0 +1,4 @@ +#!/usr/bin/env python +# encoding: utf-8 + +# Copyright (c) 2016 Max Planck Society. All rights reserved. diff --git a/mesh-master/mesh/serialization/serialization.py b/mesh-master/mesh/serialization/serialization.py new file mode 100644 index 0000000000000000000000000000000000000000..1f285a4d55ac5b8f033be62a72bbfc44f1797bad --- /dev/null +++ b/mesh-master/mesh/serialization/serialization.py @@ -0,0 +1,443 @@ +#!/usr/bin/env python +# encoding: utf-8 + +# Copyright (c) 2013 Max Planck Society. All rights reserved. +# Created by Matthew Loper on 2013-02-20. 
+ +import re +import os +import sys +import numpy as np + +from ..errors import SerializationError + +""" +serialization.py + + +""" + +__all__ = ['load_from_obj', 'load_from_obj_cpp', 'write_obj', 'write_mtl', + 'write_json', 'write_three_json', + 'set_landmark_indices_from_ppfile', 'set_landmark_indices_from_lmrkfile', + 'load_from_ply', 'load_from_file'] + +# import os.path + + +def load_from_obj(self, filename): + v = [] + f = [] + ft = [] + fn = [] + vt = [] + vn = [] + vc = [] + segm = dict() + landm_raw_xyz = dict() + currSegm = '' + currLandm = '' + with open(filename, 'r', buffering=2 ** 10) as fp: + for line in fp: + line = line.split() + if len(line) > 0: + if line[0] == 'v': + v.append([float(x) for x in line[1:4]]) + if len(line) == 7: + vc.append([float(x) for x in line[4:]]) + if currLandm: + landm_raw_xyz[currLandm] = v[-1] + currLandm = '' + elif line[0] == 'vt': + vt.append([float(x) for x in line[1:]]) + elif line[0] == 'vn': + vn.append([float(x) for x in line[1:]]) + elif line[0] == 'f': + faces = [x.split('/') for x in line[1:]] + for iV in range(1, len(faces) - 1): # trivially triangulate faces + f.append([int(faces[0][0]), int(faces[iV][0]), int(faces[iV + 1][0])]) + if (len(faces[0]) > 1) and faces[0][1]: + ft.append([int(faces[0][1]), int(faces[iV][1]), int(faces[iV + 1][1])]) + if (len(faces[0]) > 2) and faces[0][2]: + fn.append([int(faces[0][2]), int(faces[iV][2]), int(faces[iV + 1][2])]) + if currSegm: + segm[currSegm].append(len(f) - 1) + elif line[0] == 'g': + currSegm = line[1] + if currSegm not in segm.keys(): + segm[currSegm] = [] + elif line[0] == '#landmark': + currLandm = line[1] + elif line[0] == 'mtllib': + self.materials_filepath = os.path.join(os.path.dirname(filename), line[1]) + self.materials_file = open(self.materials_filepath, 'r').readlines() + + self.v = np.array(v) + self.f = np.array(f) - 1 + if vt: + self.vt = np.array(vt) + if vn: + self.vn = np.array(vn) + if vc: + self.vc = np.array(vc) + if ft: + self.ft = np.array(ft) - 1 + if fn: + self.fn = np.array(fn) - 1 + self.segm = segm + self.landm_raw_xyz = landm_raw_xyz + self.recompute_landmark_indices() + + if hasattr(self, 'materials_file'): + for line in self.materials_file: + if line and line.split() and line.split()[0] == 'map_Ka': + self.texture_filepath = os.path.abspath(os.path.join(os.path.dirname(filename), line.split()[1])) + + +def load_from_obj_cpp(self, filename): + from .loadobj import loadobj + if sys.version_info[:2] == (2, 6): + from OrderedDict import OrderedDict + else: + from collections import OrderedDict + + v, vt, vn, f, ft, fn, mtl_path, landm, segm = loadobj(filename) + if v.size != 0: + self.v = v + if f.size != 0: + self.f = f + if vn.size != 0: + self.vn = vn + if vt.size != 0: + self.vt = vt + if fn.size != 0: + self.fn = fn + if ft.size != 0: + self.ft = ft + if segm: + self.segm = OrderedDict([(k, v if type(v) is list else v.tolist()) for k, v in segm.items()]) + if mtl_path: + try: + self.materials_filepath = os.path.join(os.path.dirname(filename), mtl_path.strip()) + self.materials_file = file(self.materials_filepath, 'r').readlines() + except: + self.materials_filepath = None + if hasattr(self, 'materials_file'): + for line in self.materials_file: + if line and line.split() and line.split()[0] == 'map_Ka': + self.texture_filepath = os.path.abspath(os.path.join(os.path.dirname(filename), line.split()[1])) + if landm: + self.landm = landm + self.recompute_landmark_xyz() + + +def write_obj(self, filename, flip_faces=False, group=False, comments=None): + if 
os.path.dirname(filename) and not os.path.exists(os.path.dirname(filename)): + os.makedirs(os.path.dirname(filename)) + + ff = -1 if flip_faces else 1 + + def write_face_to_obj_file(face_index, obj_file): + vertex_indices = self.f[face_index][::ff] + 1 + + if hasattr(self, 'ft'): + texture_indices = self.ft[face_index][::ff] + 1 + if not hasattr(self, 'fn'): + self.reset_face_normals() + normal_indices = self.fn[face_index][::ff] + 1 + obj_file.write('f %d/%d/%d %d/%d/%d %d/%d/%d\n' % tuple( + np.array([vertex_indices, texture_indices, normal_indices]).T.flatten())) + elif hasattr(self, 'fn'): + normal_indices = self.fn[face_index][::ff] + 1 + obj_file.write('f %d//%d %d//%d %d//%d\n' % tuple(np.array([vertex_indices, normal_indices]).T.flatten())) + else: + obj_file.write('f %d %d %d\n' % tuple(vertex_indices)) + + with open(filename, 'w') as fi: + if comments is not None: + if isinstance(comments, str): + comments = [comments] + for comment in comments: + for line in comment.split("\n"): + fi.write("# %s\n" % line) + + if hasattr(self, 'texture_filepath'): + outfolder = os.path.dirname(filename) + outbase = os.path.splitext(os.path.basename(filename))[0] + mtlpath = outbase + '.mtl' + fi.write('mtllib %s\n' % mtlpath) + from shutil import copyfile + texture_name = outbase + os.path.splitext(self.texture_filepath)[1] + if os.path.abspath(self.texture_filepath) != os.path.abspath(os.path.join(outfolder, texture_name)): + copyfile(self.texture_filepath, os.path.join(outfolder, texture_name)) + self.write_mtl(os.path.join(outfolder, mtlpath), outbase, texture_name) + + for r in self.v: + fi.write('v %f %f %f\n' % (r[0], r[1], r[2])) + + if hasattr(self, 'fn') and hasattr(self, 'vn'): + for r in self.vn: + fi.write('vn %f %f %f\n' % (r[0], r[1], r[2])) + + if hasattr(self, 'ft'): + for r in self.vt: + if len(r) == 3: + fi.write('vt %f %f %f\n' % (r[0], r[1], r[2])) + else: + fi.write('vt %f %f\n' % (r[0], r[1])) + if hasattr(self, 'segm') and self.segm and not group: + for p in self.segm.keys(): + fi.write('g %s\n' % p) + for face_index in self.segm[p]: + write_face_to_obj_file(face_index, fi) + else: + if hasattr(self, 'f'): + for face_index in range(len(self.f)): + write_face_to_obj_file(face_index, fi) + + +def write_mtl(self, path, material_name, texture_name): + """Material attribute file serialization""" + with open(path, 'w') as f: + f.write('newmtl %s\n' % material_name) + # copied from another obj, no idea about what it does + f.write('ka 0.329412 0.223529 0.027451\n') + f.write('kd 0.780392 0.568627 0.113725\n') + f.write('ks 0.992157 0.941176 0.807843\n') + f.write('illum 0\n') + f.write('map_Ka %s\n' % texture_name) + f.write('map_Kd %s\n' % texture_name) + f.write('map_Ks %s\n' % texture_name) + + +def write_ply(self, filename, flip_faces=False, ascii=False, little_endian=True, comments=[]): + from psbody.mesh.serialization import plyutils + + if os.path.dirname(filename) and not os.path.exists(os.path.dirname(filename)): + os.makedirs(os.path.dirname(filename)) + + ff = -1 if flip_faces else 1 + + if isinstance(comments, str): + comments = [comments] + comments = filter(lambda c: len(c) > 0, sum(map(lambda c: c.split("\n"), comments), [])) + + plyutils.write(list([list(x) for x in self.v]), + list([list(x[::ff]) for x in self.f] if hasattr(self, 'f') else []), + list([list((x * 255).astype(int)) for x in ([] if not hasattr(self, 'vc') else self.vc)]), + filename, ascii, little_endian, list(comments), + list([list(x) for x in ([] if not hasattr(self, 'vn') else self.vn)])) + + 
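To make the face syntax emitted by write_obj above concrete, a short sketch with toy indices (it assumes the mesh carries both ft and fn, so the v/vt/vn form is written):

import numpy as np

f_row = np.array([0, 1, 2])       # one face, 0-based vertex indices
ft_row = np.array([10, 11, 12])   # texture-coordinate indices
fn_row = np.array([0, 1, 2])      # normal indices
ff = -1                           # flip_faces=True reverses the winding; 1 keeps it

vertex_indices = f_row[::ff] + 1          # OBJ indices are 1-based
texture_indices = ft_row[::ff] + 1
normal_indices = fn_row[::ff] + 1
line = 'f %d/%d/%d %d/%d/%d %d/%d/%d\n' % tuple(
    np.array([vertex_indices, texture_indices, normal_indices]).T.flatten())
# line == 'f 3/13/3 2/12/2 1/11/1\n'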
+def write_three_json(self, filename, name=""): + import json + + if os.path.dirname(filename) and not os.path.exists(os.path.dirname(filename)): + os.makedirs(os.path.dirname(filename)) + + name = name if name else self.basename + name = name if name else os.path.splitext(os.path.basename(filename))[0] + + metadata = {"formatVersion": 3.1, + "sourceFile": "%s.obj" % name, + "generatedBy": "korper", + "vertices": len(self.v), + "faces": len(self.f), + "normals": len(self.vn), + "colors": 0, + "uvs": len(self.vt), + "materials": 1 + } + materials = [{"DbgColor": 15658734, + "DbgIndex": 0, + "DbgName": "defaultMat", + "colorAmbient": [0.0, 0.0, 0.0], + "colorDiffuse": [0.64, 0.64, 0.64], + "colorSpecular": [0.5, 0.5, 0.5], + "illumination": 2, + "opticalDensity": 1.0, + "specularCoef": 96.078431, + "transparency": 1.0 + }] + + mesh_data = {"metadata": metadata, + 'scale': 0.35, + "materials": materials, + "morphTargets": [], + "morphColors": [], + "colors": []} + mesh_data["vertices"] = self.v.flatten().tolist() + mesh_data["normals"] = self.vn.flatten().tolist() + mesh_data["uvs"] = [np.array([[vt[0], vt[1]] for vt in self.vt]).flatten().tolist()] + mesh_data["faces"] = np.array([[42, self.f[i][0], self.f[i][1], self.f[i][2], 0, self.ft[i][0], self.ft[i][1], + self.ft[i][2], self.fn[i][0], self.fn[i][1], self.fn[i][2]] for i in + range(len(self.f))]).flatten().tolist() + + json_or_js_file = open(filename, 'w') + json_or_js_file.write(json.dumps(mesh_data, indent=4)) + json_or_js_file.close() + + +def write_json(self, filename, header="", footer="", name="", include_faces=True, texture_mode=True): + import json + + if os.path.dirname(filename) and not os.path.exists(os.path.dirname(filename)): + os.makedirs(os.path.dirname(filename)) + + name = name if name else self.basename + name = name if name else os.path.splitext(os.path.basename(filename))[0] + + if texture_mode: + vertex_texture_pairs = {} + for face_index in range(len(self.f)): + for i in [0, 1, 2]: + v_index = self.f[face_index][i] + t_index = self.ft[face_index][i] + vertex_texture_pairs[(v_index, t_index)] = [] + for face_index in range(len(self.f)): + for i in [0, 1, 2]: + v_index = self.f[face_index][i] + t_index = self.ft[face_index][i] + vertex_texture_pairs[(v_index, t_index)].append((face_index, i)) + mesh_data = {'name': name, + 'vertices': [], + 'textures': [] + } + for v_index, t_index, faces_entries in vertex_texture_pairs.items(): + mesh_data['vertices'].append() + + if include_faces: + mesh_data['faces'] = list([[int(np.asscalar(i)) for i in list(x)] for x in self.f]) + + else: + mesh_data = {'name': name, + 'vertices': list([list(x) for x in self.v]) + } + if include_faces: + mesh_data['faces'] = list([[int(np.asscalar(i)) for i in list(x)] for x in self.f]) + + json_or_js_file = open(filename, 'w') + if os.path.basename(filename).endswith('js'): + json_or_js_file.write(header + '\nmesh = ') if header else json_or_js_file.write('var mesh = ') + json_or_js_file.write(json.dumps(mesh_data, indent=4)) + json_or_js_file.write(footer) + else: + json_or_js_file.write(json.dumps(mesh_data, indent=4)) + json_or_js_file.close() + + +def set_landmark_indices_from_ppfile(self, ppfilename): + from xml.etree import ElementTree + tree = ElementTree.parse(ppfilename) + + def get_xyz(e): + try: + return [float(e.attrib['x']), float(e.attrib['y']), float(e.attrib['z'])] + except: # may happen if landmarks are just spaces + return [0, 0, 0] + + self.landm_raw_xyz = dict((e.attrib['name'], get_xyz(e)) for e in tree.iter() if e.tag == 
'point') + self.recompute_landmark_indices(ppfilename) + + +def set_landmark_indices_from_lmrkfile(self, lmrkfilename): + with open(lmrkfilename, 'r') as lmrkfile: + self.landm_raw_xyz = {} + + for line in lmrkfile.readlines(): + if not line.strip(): + continue + command = line.split()[0] + data = [float(x) for x in line.split()[1:]] + + if command == '_scale': + selfscale_factor = np.matrix(data) + elif command == '_translate': + self.caesar_translation_vector = np.matrix(data) + elif command == '_rotation': + self.caesar_rotation_matrix = np.matrix(data).reshape(3, 3) + else: + self.landm_raw_xyz[command] = [data[1], data[2], data[0]] + self.recompute_landmark_indices(lmrkfilename) + + +def _is_lmrkfile(filename): + is_lmrk = re.compile('^_scale\s[-\d\.]+\s+_translate(\s[-\d\.]+){3}\s+_rotation(\s[-\d\.]+){9}\s+') + with open(filename) as f: + data = f.read() + res = is_lmrk.match(data) + return res + + +def set_landmark_indices_from_any(self, landmarks): + ''' + Sets landmarks given any of: + - ppfile + - ldmk file + - dict of {name:inds} (i.e. mesh.landm) + - dict of {name:xyz} (i.e. mesh.landm_xyz) + - pkl, json, yaml file containing either of the above dicts + ''' + import json + import pickle + + try: + path_exists = os.path.exists(landmarks) + except: + path_exists = False + if path_exists: + if re.search(".ya{0,1}ml$", landmarks): + import yaml + with open(landmarks) as f: + self.set_landmarks_from_raw(yaml.load(f, Loader=yaml.FullLoader)) + elif re.search(".json$", landmarks): + with open(landmarks) as f: + self.set_landmarks_from_raw(json.load(f)) + elif re.search(".pkl$", landmarks): + with open(landmarks, "rb") as f: + self.set_landmarks_from_raw(pickle.load(f)) + elif _is_lmrkfile(landmarks): + self.set_landmark_indices_from_lmrkfile(landmarks) + else: + try: + self.set_landmark_indices_from_ppfile(landmarks) + except: + raise Exception("Landmark file %s is of unknown format" % landmarks) + else: + self.set_landmarks_from_raw(landmarks) + + +def load_from_file(self, filename, use_cpp=True): + if re.search(".ply$", filename): + self.load_from_ply(filename) + elif re.search(".obj$", filename): + # XXX experimental cpp obj loader, if problems, switch back to + if use_cpp: + self.load_from_obj_cpp(filename) + else: + self.load_from_obj(filename) + + elif re.search(".bsf$", filename): + self.load_from_bsf(filename) + else: + raise NotImplementedError("Unknown mesh file format.") + + +def load_from_ply(self, filename): + from os.path import abspath, dirname, join + + test_data_folder = abspath(join(dirname(__file__), '..', 'data', 'unittest')) + + from psbody.mesh.serialization import plyutils + try: + res = plyutils.read(filename) + except plyutils.error as e: + raise SerializationError(e) + + self.v = np.array(res['pts']).T.copy() + self.f = np.array(res['tri']).T.copy() + + if 'color' in res: + self.set_vertex_colors(np.array(res['color']).T.copy() / 255) + if 'normals' in res: + self.vn = np.array(res['normals']).T.copy() diff --git a/mesh-master/mesh/sphere.py b/mesh-master/mesh/sphere.py new file mode 100644 index 0000000000000000000000000000000000000000..f8b108f3b3a6effad09260811afce378b4d8e7e2 --- /dev/null +++ b/mesh-master/mesh/sphere.py @@ -0,0 +1,74 @@ +import numpy as np +from .mesh import Mesh +from .colors import name_to_rgb + + +__all__ = ['Sphere'] + + +class Sphere(object): + def __init__(self, center, radius): + if(center.flatten().shape != (3,)): + raise Exception("Center should have size(1,3) instead of %s" % center.shape) + self.center = center.flatten() + 
self.radius = radius + + def __str__(self): + return "%s:%s" % (self.center, self.radius) + + def to_mesh(self, color=name_to_rgb['red']): + v = np.array([[0.0000, -1.000, 0.0000], [0.7236, -0.447, 0.5257], + [-0.278, -0.447, 0.8506], [-0.894, -0.447, 0.0000], + [-0.278, -0.447, -0.850], [0.7236, -0.447, -0.525], + [0.2765, 0.4472, 0.8506], [-0.723, 0.4472, 0.5257], + [-0.720, 0.4472, -0.525], [0.2763, 0.4472, -0.850], + [0.8945, 0.4472, 0.0000], [0.0000, 1.0000, 0.0000], + [-0.165, -0.850, 0.4999], [0.4253, -0.850, 0.3090], + [0.2629, -0.525, 0.8090], [0.4253, -0.850, -0.309], + [0.8508, -0.525, 0.0000], [-0.525, -0.850, 0.0000], + [-0.688, -0.525, 0.4999], [-0.162, -0.850, -0.499], + [-0.688, -0.525, -0.499], [0.2628, -0.525, -0.809], + [0.9518, 0.0000, -0.309], [0.9510, 0.0000, 0.3090], + [0.5876, 0.0000, 0.8090], [0.0000, 0.0000, 1.0000], + [-0.588, 0.0000, 0.8090], [-0.951, 0.0000, 0.3090], + [-0.955, 0.0000, -0.309], [-0.587, 0.0000, -0.809], + [0.0000, 0.0000, -1.000], [0.5877, 0.0000, -0.809], + [0.6889, 0.5257, 0.4999], [-0.262, 0.5257, 0.8090], + [-0.854, 0.5257, 0.0000], [-0.262, 0.5257, -0.809], + [0.6889, 0.5257, -0.499], [0.5257, 0.8506, 0.0000], + [0.1626, 0.8506, 0.4999], [-0.425, 0.8506, 0.3090], + [-0.422, 0.8506, -0.309], [0.1624, 0.8506, -0.499]]) + + f = np.array([[15, 3, 13], [13, 14, 15], [2, 15, 14], [13, 1, 14], [17, 2, 14], [14, 16, 17], + [6, 17, 16], [14, 1, 16], [19, 4, 18], [18, 13, 19], [3, 19, 13], [18, 1, 13], + [21, 5, 20], [20, 18, 21], [4, 21, 18], [20, 1, 18], [22, 6, 16], [16, 20, 22], + [5, 22, 20], [16, 1, 20], [24, 2, 17], [17, 23, 24], [11, 24, 23], [23, 17, 6], + [26, 3, 15], [15, 25, 26], [7, 26, 25], [25, 15, 2], [28, 4, 19], [19, 27, 28], + [8, 28, 27], [27, 19, 3], [30, 5, 21], [21, 29, 30], [9, 30, 29], [29, 21, 4], + [32, 6, 22], [22, 31, 32], [10, 32, 31], [31, 22, 5], [33, 7, 25], [25, 24, 33], + [11, 33, 24], [24, 25, 2], [34, 8, 27], [27, 26, 34], [7, 34, 26], [26, 27, 3], + [35, 9, 29], [29, 28, 35], [8, 35, 28], [28, 29, 4], [36, 10, 31], [31, 30, 36], + [9, 36, 30], [30, 31, 5], [37, 11, 23], [23, 32, 37], [10, 37, 32], [32, 23, 6], + [39, 7, 33], [33, 38, 39], [12, 39, 38], [38, 33, 11], [40, 8, 34], [34, 39, 40], + [12, 40, 39], [39, 34, 7], [41, 9, 35], [35, 40, 41], [12, 41, 40], [40, 35, 8], + [42, 10, 36], [36, 41, 42], [12, 42, 41], [41, 36, 9], [38, 11, 37], [37, 42, 38], + [12, 38, 42], [42, 37, 10]]) - 1 + + return Mesh(v=v * self.radius + self.center, f=f, vc=np.tile(color, (v.shape[0], 1))) + + def has_inside(self, point): + return np.linalg.norm(point - self.center) <= self.radius + + def intersects(self, sphere): + return np.linalg.norm(sphere.center - self.center) < (self.radius + sphere.radius) + + def intersection_vol(self, sphere): + if not self.intersects(sphere): + return 0 + d = np.linalg.norm(sphere.center - self.center) + R, r = (self.radius, sphere.radius) if (self.radius > sphere.radius) else (sphere.radius, self.radius) + if R >= (d + r): + return (4 * np.pi * (r ** 3)) / 3 + + # http://mathworld.wolfram.com/Sphere-SphereIntersection.html + return (np.pi * (R + r - d) ** 2 * (d ** 2 + 2 * d * r - 3 * r * r + 2 * d * R + 6 * r * R - 3 * R * R)) / (12 * d) diff --git a/mesh-master/mesh/src/AABB_n_tree.h b/mesh-master/mesh/src/AABB_n_tree.h new file mode 100644 index 0000000000000000000000000000000000000000..7903eac9cff9f1fccc07d025ea78be6537b8fe48 --- /dev/null +++ b/mesh-master/mesh/src/AABB_n_tree.h @@ -0,0 +1,355 @@ +// Author(s) : Javier Romero + +#ifndef AABB_N_TREE_H +#define AABB_N_TREE_H + +#include + 
+#define CGAL_CFG_NO_CPP0X_VARIADIC_TEMPLATES 1 +#include +#include +#include +#include +#include + +#include +#include + +#include +#include + + +typedef CGAL::Simple_cartesian K; +using boost::uint32_t; +using boost::uint64_t; +using boost::array; +using std::vector; + +typedef K::Point_3 Point; +typedef K::Segment_3 Segment; +typedef K::Triangle_3 Triangle; +typedef K::Vector_3 Normal; +typedef std::pair Point_Normal; + +typedef std::vector::iterator Iterator; +typedef CGAL::AABB_triangle_primitive Primitive; + + +namespace CGAL { + template + FT dist_point_normal(const Point_Normal& a, const Point_Normal& b, FT eps){ + return (sqrt(squared_distance(a.first, b.first)) + eps*(1 - a.second*b.second)); + } + + // Adaptation of nearest_point_3 to take into account normals + // product with weight eps + template + Point_Normal nearest_pointnormal_3(const Point_Normal& origin, + const Triangle& triangle, + const Point_Normal& bound, + const FT eps, + const K& k){ + // compute normal penalties + const FT dist_n_bound = eps*(1 - origin.second*bound.second); + //const Normal tri_n = unit_normal(triangle[0],triangle[1],triangle[2]); + Normal tri_n = triangle.supporting_plane().orthogonal_direction().vector(); + tri_n = tri_n / sqrt(tri_n.squared_length()); + const FT dist_n_triangle = eps*(1 - origin.second*tri_n); + + // Distance from origin to bound + const FT dist_bound = sqrt(squared_distance(origin.first,bound.first)) + dist_n_bound; + + // since dist_n_triangle < dist_triangle + if (dist_n_triangle > dist_bound) + return bound; + + // Project origin on triangle supporting plane + const Point_Normal proj = std::make_pair(triangle.supporting_plane().projection(origin.first), tri_n); + + const FT dist_proj = sqrt(squared_distance(origin.first,proj.first)) + dist_n_triangle; + + Point moved_point; + // If point is projected outside, return bound + if ( dist_proj > dist_bound) + return bound; + // If proj is inside triangle, total dist is dist_proj + else if ( CGAL::internal::is_inside_triangle_3(proj.first,triangle,moved_point,k) ) + return proj; + // Else return the constructed point (nearest point of triangle from proj) + // if it is closest to origin than bound + else{ + const FT dist_moved = sqrt(squared_distance(origin.first, moved_point)) + dist_n_triangle; + return (dist_moved > dist_bound) ? 
bound : std::make_pair(moved_point, tri_n); + } + } + + // extends AABB_Traits with classes handling points with normals + template + class AABB_n_traits:public AABB_traits{ + + public: + + typedef Point_Normal PointNormal; + typedef AABB_n_traits AT; + + class Do_intersect{ + public: + template + bool operator()(const Query& q, const CGAL::Bbox_3& bbox) const{ + return CGAL::do_intersect(q, bbox); + } + + template + bool operator()(const Query& q, const AABB_primitive& pr) const{ + return GeomTraits().do_intersect_3_object()(q, pr.datum()); + } + + bool operator()(const typename GeomTraits::Triangle_3& q, const AABB_primitive& pr) const{ + + // if any point is the same, don't consider it' + if(q[0] == pr.datum()[0] || q[0] == pr.datum()[1] ||q[0] == pr.datum()[2] || + q[1] == pr.datum()[0] || q[1] == pr.datum()[1] ||q[1] == pr.datum()[2] || + q[2] == pr.datum()[0] || q[2] == pr.datum()[1] ||q[2] == pr.datum()[2]) + return false; + else + return CGAL::do_intersect(q, pr.datum()); + } + }; + + class Closest_point { + typedef typename AT::Point_3 Point; + typedef typename AT::PointNormal PointNormal; + typedef typename AT::Primitive Primitive; + typedef typename AT::FT FT; + + public: + + // for intersection: return the closest point on + // triangle pr or bound to pn + PointNormal operator()(const PointNormal& pn, const Primitive& pr, + const PointNormal& bound, FT eps) const { + return nearest_pointnormal_3(pn, pr.datum(), bound, eps, K()); + } + + }; + + class Compare_distance { + typedef typename AT::Point_3 Point; + typedef typename AT::PointNormal PointNormal; + typedef typename AT::FT FT; + typedef typename AT::Primitive Primitive; + public: + + // create a sphere that contains all possible results + // (all points closer than current result) and + // check if pr intersects + template + CGAL::Comparison_result operator()(const PointNormal& p, const Solid& pr, const PointNormal& bound, FT eps) const + { + // d_q = ||q - p|| + eps(1 - n_q*n_p) > ||q-p|| + // d_q < d_b -> ||q-p|| < d_b + // a sphere containing all q such that ||q-p|| < d_b + // contains all q such that d_q < d_b + FT safe_dist = dist_point_normal(p,bound,eps); + return GeomTraits().do_intersect_3_object() + (GeomTraits().construct_sphere_3_object() + (p.first, safe_dist*safe_dist), pr)? 
+ CGAL::SMALLER : CGAL::LARGER; + } + }; + + Closest_point closest_point_object() {return Closest_point();} + Compare_distance compare_distance_object() {return Compare_distance();} + Do_intersect do_intersect_object() {return Do_intersect();} + + };// end of AABB_n_traits + + + /** + * @class Do_intersect_noself_traits + */ + template + class Do_intersect_noself_traits + { + typedef typename AABBTraits::FT FT; + typedef typename AABBTraits::Point_3 Point; + typedef typename AABBTraits::Primitive Primitive; + typedef typename AABBTraits::Bounding_box Bounding_box; + typedef typename AABBTraits::Primitive::Id Primitive_id; + typedef typename AABBTraits::Point_and_primitive_id Point_and_primitive_id; + typedef typename AABBTraits::Object_and_primitive_id Object_and_primitive_id; + typedef ::CGAL::AABB_node Node; + typedef typename ::CGAL::AABB_tree::size_type size_type; + + public: + Do_intersect_noself_traits() + : m_is_found(false) + {} + + bool go_further() const { return !m_is_found; } + + void intersection(const Query& query, const Primitive& primitive) + { + if( AABBTraits().do_intersect_object()(query, primitive) ) + m_is_found = true; + } + + bool do_intersect(const Query& query, const Node& node) const + { + return AABBTraits().do_intersect_object()(query, node.bbox()); + } + + bool is_intersection_found() const { return m_is_found; } + + private: + bool m_is_found; + }; + + /** + * @class Projection_n_traits + */ + template + class Projection_n_traits + { + typedef typename AABBTraits::FT FT; + typedef typename AABBTraits::Point_3 Point; + typedef typename AABBTraits::PointNormal PointNormal; + typedef typename AABBTraits::Primitive Primitive; + typedef typename AABBTraits::Bounding_box Bounding_box; + typedef typename AABBTraits::Primitive::Id Primitive_id; + typedef typename AABBTraits::Point_and_primitive_id Point_and_primitive_id; + typedef typename AABBTraits::Object_and_primitive_id Object_and_primitive_id; + typedef ::CGAL::AABB_node Node; + + public: + Projection_n_traits(const PointNormal& hint, + const typename Primitive::Id& hint_primitive, + const FT eps) + : m_closest_point(hint), m_closest_primitive(hint_primitive), eps(eps) {} + + bool go_further() const { return true; } + + void intersection(const PointNormal& query, const Primitive& primitive) + { + PointNormal new_closest_point = AABBTraits().closest_point_object() + (query, primitive, m_closest_point, eps); + if(new_closest_point.first != m_closest_point.first) + { + m_closest_primitive = primitive.id(); + m_closest_point = new_closest_point; // this effectively shrinks the sphere + } + } + + bool do_intersect(const PointNormal& query, const Node& node) const + { + return AABBTraits().compare_distance_object() + (query, node.bbox(), m_closest_point, eps) == CGAL::SMALLER; + } + + Point closest_point() const { return m_closest_point.first; } + Point_and_primitive_id closest_point_and_primitive() const + { + return Point_and_primitive_id(m_closest_point.first, m_closest_primitive); + } + + private: + PointNormal m_closest_point; + typename Primitive::Id m_closest_primitive; + const FT eps; + }; + + // Class that extends AABB tree with PointNormal Search + template + class AABB_n_tree:public AABB_tree{ + public: + typedef typename AABBTraits::Point_3 Point; + typedef typename AABBTraits::PointNormal PointNormal; + typedef typename AABBTraits::Point_and_primitive_id Point_and_primitive_id; + + AABB_n_tree():AABB_tree(){} + + template + AABB_n_tree(ConstPrimitiveIterator first, ConstPrimitiveIterator beyond, + 
typename AABBTraits::FT eps): + AABB_tree(first, beyond), eps(eps){} + + // XXX The hint is random; that is slow and could be closer (euclideanly) than the best point + Point_and_primitive_id closest_point_and_primitive(const PointNormal& query) const{ + // return closest_point_and_primitive(query,best_hint(query.first)); + return closest_point_and_primitive(query,this->any_reference_point_and_id()); + } + + Point_and_primitive_id closest_point_and_primitive(const PointNormal& query, + const Point_and_primitive_id& hint) const{ + + Normal hint_n = (*hint.second).supporting_plane().orthogonal_direction().vector(); + PointNormal hint_pn = std::make_pair(hint.first, hint_n/sqrt(hint_n.squared_length())); + // hint_pn = std::make_pair(Point(10000,10000,10000),Normal(0,0,1)); + Projection_n_traits projection_traits(hint_pn, hint.second, eps); + this->traversal(query, projection_traits); + return projection_traits.closest_point_and_primitive(); + } + + template + bool do_intersect(const Query& query) const + { + //using namespace CGAL::internal::AABB_tree; + Do_intersect_noself_traits traversal_traits; + this->traversal(query, traversal_traits); + return traversal_traits.is_intersection_found(); + } + + typename AABBTraits::FT eps; + }; +} + +typedef CGAL::AABB_n_traits AABB_n_triangle_traits; +typedef AABB_n_triangle_traits::Point_and_primitive_id Point_and_Primitive_id; +typedef CGAL::AABB_n_tree Tree; + +struct TreeAndTri { + TreeAndTri(const array* p_mesh_tri, + const array* p_mesh_points, + const double eps, + const size_t T, + const size_t P) + { + std::vector mesh_points; + mesh_points.reserve(P); + for(size_t pp=0; pp triangles; + Tree tree; +}; + +template +boost::uint64_t wrapPointer(T *ptr) { + return reinterpret_cast(ptr); +} +template +T* unwrapPointer(uint64_t ptr) { + return reinterpret_cast(ptr); +} + +#endif // AABB_N_TREE_H + +/***EMACS SETTINGS***/ +/* Local Variables: */ +/* tab-width: 2 */ +/* End: */ diff --git a/mesh-master/mesh/src/aabb_normals.cpp b/mesh-master/mesh/src/aabb_normals.cpp new file mode 100644 index 0000000000000000000000000000000000000000..758ec4e133fee9fdf57d3164203d59ec8bd10c6f --- /dev/null +++ b/mesh-master/mesh/src/aabb_normals.cpp @@ -0,0 +1,190 @@ + +// needed to avoid the link to debug "_d.lib" libraries +#include "hijack_python_headers.hpp" +#include + +#ifdef _OPENMP +#include +#endif + +#include "AABB_n_tree.h" +#include "cgal_error_emulation.hpp" + +typedef uint32_t Index; + +static PyObject * aabbtree_normals_compute(PyObject *self, PyObject *args); +static PyObject * aabbtree_normals_nearest(PyObject *self, PyObject *args); +static PyObject * aabbtree_normals_selfintersects(PyObject *self, PyObject *args); + +static PyMethodDef SpatialsearchMethods[] = { + { "aabbtree_n_compute", + aabbtree_normals_compute, + METH_VARARGS, + "aabbtree_n_compute."}, + { "aabbtree_n_nearest", + aabbtree_normals_nearest, + METH_VARARGS, + "aabbtree_n_nearest."}, + { "aabbtree_n_selfintersects", + aabbtree_normals_selfintersects, + METH_VARARGS, + "aabbtree_n_selfintersects."}, + {NULL, NULL, 0, NULL} /* Sentinel */ +}; + +static struct PyModuleDef moduleDef = +{ + PyModuleDef_HEAD_INIT, + "aabb_normals", /* name of module */ + "", /* module documentation, may be NULL */ + -1, /* size of per-interpreter state of the module, or -1 if the module keeps state in global variables. 
*/ + SpatialsearchMethods +}; + +PyMODINIT_FUNC PyInit_aabb_normals(void) +{ + PyObject *module = PyModule_Create(&moduleDef); + + import_array(); + + return module; +} + +void aabb_tree_destructor(PyObject *ptr) +{ + TreeAndTri* search = (TreeAndTri*) PyCapsule_GetPointer(ptr, NULL); + delete search; +} + + +static PyObject * +aabbtree_normals_compute(PyObject *self, PyObject *args) +{ + PyArrayObject *py_v = NULL, *py_f = NULL; + + double eps; + if (!PyArg_ParseTuple(args, "O!O!d", &PyArray_Type, &py_v, &PyArray_Type, &py_f, &eps)) + return NULL; + + if (py_v->descr->type_num != NPY_DOUBLE || py_v->nd != 2) { + PyErr_SetString(PyExc_ValueError, "Vertices must be of type double, and 2 dimensional"); + return NULL; + } + if (py_f->descr->type_num != NPY_UINT32 || py_f->nd != 2) { + PyErr_SetString(PyExc_ValueError, "Faces must be of type uint32, and 2 dimensional"); + return NULL; + } + + npy_intp* v_dims = PyArray_DIMS(py_v); + npy_intp* f_dims = PyArray_DIMS(py_f); + + if (v_dims[1] != 3 || f_dims[1] != 3) { + PyErr_SetString(PyExc_ValueError, "Input must be Nx3"); + return NULL; + } + + double *pV = (double*)PyArray_DATA(py_v); + uint32_t *pF = (uint32_t*)PyArray_DATA(py_f); + + size_t P = v_dims[0]; + size_t T = f_dims[0]; + + array* m_mesh_tri=reinterpret_cast*>(pF); + array* m_mesh_points=reinterpret_cast*>(pV); + + try + { + TreeAndTri* search = new TreeAndTri(m_mesh_tri,m_mesh_points,eps,T,P); + + PyObject* result = PyCapsule_New((void*)search, NULL, aabb_tree_destructor); + return result; + } + catch (mesh_aabb_tree_error&) + { + return Py_None; + } + +} + +static PyObject * +aabbtree_normals_nearest(PyObject *self, PyObject *args) +{ + PyObject *py_tree, *py_v, *py_n; + if (!PyArg_ParseTuple(args, "OOO", &py_tree, &py_v, &py_n)) + return NULL; + + TreeAndTri *search = (TreeAndTri *) PyCapsule_GetPointer(py_tree, NULL); + + npy_intp* v_dims = PyArray_DIMS(py_v); + npy_intp* n_dims = PyArray_DIMS(py_n); + + if (v_dims[1] != 3) { + PyErr_SetString(PyExc_ValueError, "Input must be Nx3"); + return NULL; + } + if (n_dims[1] != v_dims[1] || n_dims[0] != v_dims[0]){ + PyErr_SetString(PyExc_ValueError, "Normals should have same dimensions as points"); + return NULL; + } + + size_t S=v_dims[0]; + + array* m_sample_points=reinterpret_cast*>(PyArray_DATA(py_v)); + array* m_sample_n=reinterpret_cast*>(PyArray_DATA(py_n)); + + #ifdef _OPENMP + omp_set_num_threads(8); + #endif + + std::vector sample_points; + sample_points.reserve(S); + for(size_t ss=0; ss(PyArray_DATA(result1)); + array* closest_point=NULL; + //if(1) { //nlhs > 1) { + npy_intp result2_dims[] = {S, 3}; + PyObject *result2 = PyArray_SimpleNew(2, result2_dims, NPY_DOUBLE); + closest_point=reinterpret_cast*>(PyArray_DATA(result2)); + //} + + #pragma omp parallel for + for(size_t ss=0; sstree.closest_point_and_primitive(sample_points[ss]); + closest_triangles[ss]=std::distance(search->triangles.begin(), closest.second); + //if(nlhs > 1) { + for(size_t cc=0; cc<3; ++cc) { + closest_point[ss][cc]=closest.first[cc]; + } + //} + } + return Py_BuildValue("NN", result1, result2); +} + +static PyObject * +aabbtree_normals_selfintersects(PyObject *self, PyObject *args) +{ + int n_intersections = 0; + PyObject *py_tree; + if (!PyArg_ParseTuple(args, "O", &py_tree)) + return NULL; + + TreeAndTri *search = (TreeAndTri *) PyCapsule_GetPointer(py_tree, NULL); + + for(Iterator it=search->triangles.begin();it!=search->triangles.end();++it) + if(search->tree.do_intersect(*it)) + ++n_intersections; + + return Py_BuildValue("i", n_intersections); +} 
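The traits classes in AABB_n_tree.h above replace the plain Euclidean metric with dist(a, b) = ||a_p - b_p|| + eps * (1 - a_n . b_n), so candidates whose normals disagree with the query normal are penalised. For a non-negative eps the normal term never decreases the distance, so any primitive that can beat the current bound must lie inside a Euclidean sphere whose radius is that bound, which is exactly the pruning test in Compare_distance. A brute-force NumPy stand-in for the tree traversal, for illustration only:

import numpy as np

def normal_weighted_dist(p, n_p, q, n_q, eps):
    # Euclidean distance plus a penalty for disagreeing unit normals (eps >= 0).
    p, q, n_p, n_q = (np.asarray(a, dtype=float) for a in (p, q, n_p, n_q))
    return float(np.linalg.norm(p - q) + eps * (1.0 - np.dot(n_p, n_q)))

def brute_force_closest(query_pt, query_n, points, normals, eps):
    # Stand-in for the tree traversal: score every candidate and keep the best.
    scores = [normal_weighted_dist(query_pt, query_n, q, n, eps)
              for q, n in zip(points, normals)]
    return int(np.argmin(scores))

# eps = 0 reduces to the ordinary nearest neighbour; a large eps prefers
# candidates whose normals point the same way as the query normal.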
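From Python, the three entry points registered in aabb_normals.cpp are called roughly as below. This is a sketch: the bare import name aabb_normals is taken from the moduleDef above and may differ in an installed package, and the array dtypes follow the checks in aabbtree_normals_compute.

import numpy as np
import aabb_normals  # assumed import name, taken from the moduleDef above

# Toy mesh: two triangles sharing an edge in the z = 0 plane.
verts = np.array([[0., 0., 0.], [1., 0., 0.], [0., 1., 0.], [1., 1., 0.]])  # (N, 3) float64
faces = np.array([[0, 1, 2], [1, 3, 2]], dtype=np.uint32)                   # (M, 3) uint32
tree = aabb_normals.aabbtree_n_compute(verts, faces, 0.1)  # eps weights the normal penalty

# One unit normal per query point, same shape as the query points.
query_pts = np.array([[0.2, 0.2, 0.5]])
query_nrm = np.array([[0.0, 0.0, 1.0]])
tri_idx, closest_pts = aabb_normals.aabbtree_n_nearest(tree, query_pts, query_nrm)

# Triangles sharing a vertex are skipped by Do_intersect, so this reports 0 here.
print(aabb_normals.aabbtree_n_selfintersects(tree))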
diff --git a/mesh-master/mesh/src/cgal_error_emulation.hpp b/mesh-master/mesh/src/cgal_error_emulation.hpp
new file mode 100644
index 0000000000000000000000000000000000000000..10299158a9e508f3e80f457797390453f7669e7d
--- /dev/null
+++ b/mesh-master/mesh/src/cgal_error_emulation.hpp
@@ -0,0 +1,37 @@
+//!@file
+//! Implements a trick to avoid linking with CGAL by defining its own (and
+//! unique) error report function.
+//!@author Raffi Enficiaud
+
+#ifndef MESH_CGAL_ERROR_EMULATION_HPP__
+#define MESH_CGAL_ERROR_EMULATION_HPP__
+
+// exception object
+struct mesh_aabb_tree_error {};
+
+#if defined(MESH_CGAL_AVOID_COMPILED_VERSION)
+
+#include
+#include
+
+// this hack makes it possible to avoid linking with CGAL.
+namespace CGAL {
+  void assertion_fail(
+    const char* expr,
+    const char* file,
+    int line,
+    const char* msg)
+  {
+    std::ostringstream o;
+    o << "An exception has been caught during execution:" << std::endl;
+    o << "- file: " << file << std::endl;
+    o << "- line: " << line << std::endl;
+    o << "- error: " << msg << std::endl;
+
+    PyErr_SetString(PyExc_RuntimeError, o.str().c_str());
+    throw mesh_aabb_tree_error();
+  }
+}
+#endif
+
+#endif /* MESH_CGAL_ERROR_EMULATION_HPP__ */
diff --git a/mesh-master/mesh/src/hijack_python_headers.hpp b/mesh-master/mesh/src/hijack_python_headers.hpp
new file mode 100644
index 0000000000000000000000000000000000000000..9dd674e8950536fa71b5a5695c8608153269eea9
--- /dev/null
+++ b/mesh-master/mesh/src/hijack_python_headers.hpp
@@ -0,0 +1,25 @@
+#ifndef MEAH_INCLUDE_PYTHON_HEADER_HPP__
+#define MEAH_INCLUDE_PYTHON_HEADER_HPP__
+
+/*!@file
+ * This file hijacks the inclusion of the python libraries on Windows to
+ * prevent the linking with the debug version of python.lib (that is named
+ * python_d.lib and that is not provided by default.
+ */ + +#undef MESH_HIJACK_AUTO_LINK + +#if defined(_WIN32) && defined(_DEBUG) + #define MESH_HIJACK_AUTO_LINK + #undef _DEBUG +#endif + +#include + +#if defined(MESH_HIJACK_AUTO_LINK) + #define _DEBUG + #undef MESH_HIJACK_AUTO_LINK +#endif + + +#endif /* MEAH_INCLUDE_PYTHON_HEADER_HPP__ */ \ No newline at end of file diff --git a/mesh-master/mesh/src/nearest_point_triangle_3.h b/mesh-master/mesh/src/nearest_point_triangle_3.h new file mode 100644 index 0000000000000000000000000000000000000000..620045667cad328e2a642ec9fb694b10d46a9333 --- /dev/null +++ b/mesh-master/mesh/src/nearest_point_triangle_3.h @@ -0,0 +1,160 @@ +// A variant of CGAL/internal/AABB_tree/nearest_point_triangle_3.h +// it differs in that it records which part of the triangle (interior, edge, or vertex) +// the query point is closest to + +#ifndef NEAREST_POINT_TRIANGLE_3_INTERIOR_EDGE_VERTEX_H_ +#define NEAREST_POINT_TRIANGLE_3_INTERIOR_EDGE_VERTEX_H_ + +#include +#include + + +namespace CGAL { +namespace iev { +// tests whether a query point q (assumed to be in the plane of the triangle) +// is inside or outside a triangle edge (p1,p2) +// returns true iff q is outside +// q is inside (p1,p2) if: +// q is on the correct side of the line through (p1,p2) +// q's projection on this line lies between p1 and p2 +// if q is outside the edge (and therefore the triangle) but projects between p1 and p2 +// set outside=true and result=the projection of q onto the line through p1 and p2 +template +inline +bool +is_inside_triangle_3_aux(const typename K::Vector_3& w, // scaled triangle normal (b-a) x (c-b) + const typename K::Point_3& p1, + const typename K::Point_3& p2, + const typename K::Point_3& q, // query point + typename K::Point_3& result, + bool& outside, + const K& k) +{ + typedef typename K::Vector_3 Vector_3; + typedef typename K::FT FT; + + typename K::Construct_vector_3 vector = + k.construct_vector_3_object(); + typename K::Construct_projected_point_3 projection = + k.construct_projected_point_3_object(); + typename K::Construct_line_3 line = + k.construct_line_3_object(); + typename K::Compute_scalar_product_3 scalar_product = + k.compute_scalar_product_3_object(); + typename K::Construct_cross_product_vector_3 cross_product = + k.construct_cross_product_vector_3_object(); + + const Vector_3 v = cross_product(vector(p1,p2), vector(p1,q)); + if ( scalar_product(v,w) < FT(0)) + { + if ( scalar_product(vector(p1,q), vector(p1,p2)) >= FT(0) + && scalar_product(vector(p2,q), vector(p2,p1)) >= FT(0) ) + { + result = projection(line(p1, p2), q); + return true; + } + outside = true; + } + + return false; +} + + +/** + * Returns the nearest point of p1,p2,p3 from origin + * @param origin the origin point + * @param p1 the first point + * @param p2 the second point + * @param p3 the third point + * @param k the kernel + * @return the nearest point from origin + */ +template +inline +typename K::Point_3 +nearest_point_3(const typename K::Point_3& origin, + const typename K::Point_3& p1, + const typename K::Point_3& p2, + const typename K::Point_3& p3, + const K& k) +{ + typedef typename K::FT FT; + + typename K::Compute_squared_distance_3 sq_distance = + k.compute_squared_distance_3_object(); + + const FT dist_origin_p1 = sq_distance(origin,p1); + const FT dist_origin_p2 = sq_distance(origin,p2); + const FT dist_origin_p3 = sq_distance(origin,p3); + + if ( dist_origin_p2 >= dist_origin_p1 + && dist_origin_p3 >= dist_origin_p1 ) + { + return p1; + } + if ( dist_origin_p3 >= dist_origin_p2 ) + { + return p2; + } + + return p3; 
+} + +/** + * @brief returns true if p is inside triangle t. If p is not inside t, + * result is the nearest point of t from p. WARNING: it is assumed that + * t and p are on the same plane. + * @param p the reference point + * @param t the triangle + * @param result if p is not inside t, the nearest point of t from p + * @param k the kernel + * @return true if p is inside t + */ +template +inline +int +nearest_primitive(const typename K::Point_3& origin, + const typename K::Triangle_3& t, + typename K::Point_3& result, + const K& k) +{ + typedef typename K::Point_3 Point_3; + typedef typename K::Vector_3 Vector_3; + + typename K::Construct_vector_3 vector = + k.construct_vector_3_object(); + typename K::Construct_vertex_3 vertex_on = + k.construct_vertex_3_object(); + typename K::Construct_cross_product_vector_3 cross_product = + k.construct_cross_product_vector_3_object(); + typename K::Construct_supporting_plane_3 supporting_plane = + k.construct_supporting_plane_3_object(); + typename K::Construct_projected_point_3 projection = + k.construct_projected_point_3_object(); + + const Point_3 p = projection(supporting_plane(t), origin); + const Point_3& t0 = vertex_on(t,0); + const Point_3& t1 = vertex_on(t,1); + const Point_3& t2 = vertex_on(t,2); + + Vector_3 w = cross_product(vector(t0,t1), vector(t1,t2)); + + bool outside = false; + if(is_inside_triangle_3_aux(w, t0, t1, p, result, outside, k)) { return 1; } + if(is_inside_triangle_3_aux(w, t1, t2, p, result, outside, k)) { return 2; } + if(is_inside_triangle_3_aux(w, t2, t0, p, result, outside, k)) { return 3; } + if(outside) { + result = nearest_point_3(p,t0,t1,t2,k); + if(result == t0) { return 4; } + if(result == t1) { return 5; } + if(result == t2) { return 6; } + } + + return 0; +} + +} // end namespace iev +} // end namespace CGAL + + +#endif // NEAREST_POINT_TRIANGLE_3_H_ diff --git a/mesh-master/mesh/src/nearest_triangle.hpp b/mesh-master/mesh/src/nearest_triangle.hpp new file mode 100644 index 0000000000000000000000000000000000000000..a609e10672140b590383b80cec4b1dfc8054f7f7 --- /dev/null +++ b/mesh-master/mesh/src/nearest_triangle.hpp @@ -0,0 +1,52 @@ +#ifndef __NEAREST_TRIANGLE_HPP__ +#define __NEAREST_TRIANGLE_HPP__ + +#include + +#define CGAL_CFG_NO_CPP0X_VARIADIC_TEMPLATES 1 +#include // must be inserted before kernel +#include +#include +#include + +#include + +#include +#include + + +typedef CGAL::Simple_cartesian K; +using boost::uint32_t; +using boost::uint64_t; +using boost::array; +using std::vector; + +typedef CGAL::AABB_triangle_primitive::iterator > Primitive; +typedef CGAL::AABB_traits AABB_triangle_traits; +typedef CGAL::AABB_tree Tree; + +struct TreeAndTri { + vector triangles; + vector points; + Tree tree; +}; + +template +boost::uint64_t wrapPointer(T *ptr) { + return reinterpret_cast(ptr); +} +template +T* unwrapPointer(uint64_t ptr) { + return reinterpret_cast(ptr); +} + +class Mesh_IntersectionsException: public std::exception { +public: + Mesh_IntersectionsException(std::string m="Mesh_IntersectionsException!"):msg(m) {} + ~Mesh_IntersectionsException() throw() {} + const char* what() const throw() { return msg.c_str(); } +private: + std::string msg; +}; + +#endif diff --git a/mesh-master/mesh/src/nearest_triangle_normals.hpp b/mesh-master/mesh/src/nearest_triangle_normals.hpp new file mode 100644 index 0000000000000000000000000000000000000000..37038e30f0af56b87acb9d63de949ea88ded53e1 --- /dev/null +++ b/mesh-master/mesh/src/nearest_triangle_normals.hpp @@ -0,0 +1,56 @@ +#ifndef __NEAREST_TRIANGLE_HPP__ 
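nearest_point_triangle_3.h above differs from the stock CGAL version in that nearest_primitive also reports which feature of the triangle the plane-projected query is closest to: 0 for the interior, 1 to 3 for the edges (t0,t1), (t1,t2), (t2,t0), and 4 to 6 for the vertices t0, t1, t2. A NumPy transcription of that classification (feature code only, without the projected point) is sketched below for reference:

import numpy as np

def closest_feature(origin, t0, t1, t2):
    # Mirrors nearest_primitive: 0 = interior, 1-3 = edge, 4-6 = vertex.
    origin = np.asarray(origin, dtype=float)
    t0, t1, t2 = (np.asarray(t, dtype=float) for t in (t0, t1, t2))
    w = np.cross(t1 - t0, t2 - t1)              # scaled triangle normal
    n = w / np.linalg.norm(w)
    p = origin - np.dot(origin - t0, n) * n     # project the query onto the triangle plane

    outside = False
    for code, (a, b) in enumerate(((t0, t1), (t1, t2), (t2, t0)), start=1):
        if np.dot(np.cross(b - a, p - a), w) < 0:           # p lies on the outer side of edge (a, b)
            if np.dot(p - a, b - a) >= 0 and np.dot(p - b, a - b) >= 0:
                return code                                 # closest feature is this edge
            outside = True
    if outside:                                             # closest feature is one of the vertices
        return 4 + int(np.argmin([np.linalg.norm(p - v) for v in (t0, t1, t2)]))
    return 0                                                # projection falls inside the triangle

# closest_feature([0.2, 0.2, 1.0], [0, 0, 0], [1, 0, 0], [0, 1, 0]) -> 0 (interior)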
+#define __NEAREST_TRIANGLE_HPP__ + +#include + +#define CGAL_CFG_NO_CPP0X_VARIADIC_TEMPLATES 1 +#include // must be inserted before kernel +#include +#include +#include + +#include +#include + + +typedef CGAL::Simple_cartesian K; +using boost::uint32_t; +using boost::uint64_t; +using boost::array; +using std::vector; + +typedef CGAL::AABB_triangle_primitive::iterator > Primitive; +typedef CGAL::AABB_traits AABB_triangle_traits; +typedef CGAL::AABB_tree Tree; + +typedef CGAL::Vector_3 Normal; + +struct TreeAndTri { + vector triangles; + Tree tree; +}; + +template +boost::uint64_t wrapPointer(T *ptr) { + return reinterpret_cast(ptr); +} +template +T* unwrapPointer(uint64_t ptr) { + return reinterpret_cast(ptr); +} + +#include +#include + +typedef CGAL::Search_traits_3 IncTreeTraits; +typedef CGAL::Orthogonal_incremental_neighbor_search INN_search; +typedef INN_search::iterator INN_iterator; +typedef INN_search::Tree ITree; + +struct ITreeAndNormals{ + vector normals; + vector centers; + ITree itree; +}; + +#endif diff --git a/mesh-master/mesh/src/plyutils.c b/mesh-master/mesh/src/plyutils.c new file mode 100644 index 0000000000000000000000000000000000000000..15ab12d5c0863a8909d83764398abe087aead9d6 --- /dev/null +++ b/mesh-master/mesh/src/plyutils.c @@ -0,0 +1,290 @@ +#include "plyutils.h" + +static PyMethodDef PlyutilsMethods[] = { + {"read", plyutils_read, METH_VARARGS, "Read a PLY file."}, + {"write", plyutils_write, METH_VARARGS, "Write a PLY file."}, + {NULL, NULL, 0, NULL} +}; + +static struct PyModuleDef moduleDef = +{ + PyModuleDef_HEAD_INIT, + "serialization.plyutils", /* name of module */ + "", /* module documentation, may be NULL */ + -1, /* size of per-interpreter state of the module, or -1 if the module keeps state in global variables. */ + PlyutilsMethods +}; + +static PyObject *PlyutilsError; + +PyMODINIT_FUNC PyInit_plyutils(void) { + PyObject *m = PyModule_Create(&moduleDef); + if (m == NULL) + return NULL; + + PlyutilsError = PyErr_NewException("plyutils.error", NULL, NULL); + Py_INCREF(PlyutilsError); + PyModule_AddObject(m, "error", PlyutilsError); + + return m; +} + +int has_color(p_ply ply) { + p_ply_element el = NULL; + p_ply_property p = NULL; + const char *name; + while ((el = ply_get_next_element(ply, el))) { + if (ply_get_element_info(el, &name, NULL) && !strcmp(name, "vertex")) { + while ((p = ply_get_next_property(el, p))) { + if (ply_get_property_info(p, &name, NULL, NULL, NULL) + && (!strcmp(name, "red") || !strcmp(name, "green") || !strcmp(name, "blue"))) + return 1; + } + } + } + return 0; +} + +int has_normals(p_ply ply) { + p_ply_element el = NULL; + p_ply_property p = NULL; + const char *name; + while ((el = ply_get_next_element(ply, el))) { + if (ply_get_element_info(el, &name, NULL) && !strcmp(name, "vertex")) { + while ((p = ply_get_next_property(el, p))) { + if (ply_get_property_info(p, &name, NULL, NULL, NULL) + && (!strcmp(name, "nx") || !strcmp(name, "ny") || !strcmp(name, "nz"))) + return 1; + } + } + } + return 0; +} + +static PyObject * plyutils_read(PyObject *self, PyObject *args) +{ + const char *filename; + p_ply ply = NULL; + int use_color, use_normals; + long n_verts, n_faces; + PyObject *x, *y, *z, *r, *g, *b; + PyObject *nx, *ny, *nz; + PyObject *tri; + + if (!PyArg_ParseTuple(args, "s", &filename)) { + PyErr_SetString(PlyutilsError, "plyutils.read doesn't know what to do without a filename."); + return NULL; + } + ply = ply_open(filename, error_cb); + if (!ply) { + PyErr_SetString(PlyutilsError, "Failed to open PLY file."); + return 
NULL; + } + if (!ply_read_header(ply)) { + PyErr_SetString(PlyutilsError, "plyread_mex: Bad raw header."); + return NULL; + } + + use_color = has_color(ply); + use_normals = has_normals(ply); + + n_verts = ply_set_read_cb(ply, "vertex", "x", vertex_cb, (void*)&x, 0); + ply_set_read_cb(ply, "vertex", "y", vertex_cb, (void*)&y, 0); + ply_set_read_cb(ply, "vertex", "z", vertex_cb, (void*)&z, 0); + if (use_color) { + ply_set_read_cb(ply, "vertex", "red", vertex_cb, (void*)&r, 0); + ply_set_read_cb(ply, "vertex", "green", vertex_cb, (void*)&g, 0); + ply_set_read_cb(ply, "vertex", "blue", vertex_cb, (void*)&b, 0); + } + if (use_normals) { + ply_set_read_cb(ply, "vertex", "nx", vertex_cb, (void*)&nx, 0); + ply_set_read_cb(ply, "vertex", "ny", vertex_cb, (void*)&ny, 0); + ply_set_read_cb(ply, "vertex", "nz", vertex_cb, (void*)&nz, 0); + } + n_faces = ply_set_read_cb(ply, "face", "vertex_indices", face_cb, (void*)&tri, 0); + if (n_faces==0) + n_faces = ply_set_read_cb(ply, "face", "vertex_index", face_cb, (void*)&tri, 0); + + x = PyList_New(n_verts); y = PyList_New(n_verts); z = PyList_New(n_verts); + if (use_color) { + r = PyList_New(n_verts); g = PyList_New(n_verts); b = PyList_New(n_verts); + } + if (use_normals) { + nx = PyList_New(n_verts); ny = PyList_New(n_verts); nz = PyList_New(n_verts); + } + tri = Py_BuildValue("[N,N,N]", PyList_New(n_faces), PyList_New(n_faces), PyList_New(n_faces)); + + if (!ply_read(ply)) { + char * msg = "Read failed. "; + char* catString = malloc(strlen(msg)+strlen(filename)+1); + strcpy(catString, msg); + strcat(catString, filename); + + PyErr_SetString(PlyutilsError, catString); + // use the string then delete it when you're done. + free(catString); + return NULL; + } + ply_close(ply); + + if (use_color && !use_normals) + return Py_BuildValue("{s:[N,N,N],s:N,s:[N,N,N]}", "pts", x, y, z, "tri", tri, "color", r, g, b); + if (!use_color && use_normals) + return Py_BuildValue("{s:[N,N,N],s:N,s:[N,N,N]}", "pts", x, y, z, "tri", tri, "normals", nx, ny, nz); + if (use_color && use_normals) + return Py_BuildValue("{s:[N,N,N],s:N,s:[N,N,N],s:[N,N,N]}", "pts", x, y, z, "tri", tri, "color", r, g, b, "normals", nx, ny, nz); + else + return Py_BuildValue("{s:[N,N,N],s:N}", "pts", x, y, z, "tri", tri); +} + +static PyObject * plyutils_write(PyObject *self, PyObject *args) +{ + const char *filename; + PyObject *pts, *tri, *color, *ascii, *little_endian, *comments; + PyObject *normals = NULL; + int use_color, use_normals, res; + p_ply ply = NULL; + PyObject *row; + long ii; + const char *comment; + + if (!PyArg_ParseTuple(args, "OOOsO|O|OO", &pts, &tri, &color, &filename, &ascii, &little_endian, &comments, &normals)) + return NULL; + + use_color = (PyList_Size(pts) == PyList_Size(color)); + use_normals = 0; + if (normals!=NULL) + use_normals = (PyList_Size(pts) == PyList_Size(normals)); + + if (ascii == Py_True) + ply = ply_create(filename, PLY_ASCII, error_cb); + else { + if (little_endian == Py_True) + ply = ply_create(filename, PLY_LITTLE_ENDIAN, error_cb); + else + ply = ply_create(filename, PLY_BIG_ENDIAN, error_cb); + } + + if (!ply) { + PyErr_SetString(PlyutilsError, "Failed to create PLY file."); + return NULL; + } + + res = 1; + + for (ii = 0; ii < PyList_Size(comments); ++ii) { + comment = PyBytes_AsString(PyObject_Str(PyList_GetItem(comments, ii))); + res &= ply_add_comment(ply, comment); + } + + res &= ply_add_element(ply, "vertex", PyList_Size(pts)); + res &= ply_add_scalar_property(ply, "x", PLY_FLOAT); + res &= ply_add_scalar_property(ply, "y", PLY_FLOAT); + res &= 
ply_add_scalar_property(ply, "z", PLY_FLOAT); + + if(use_normals){ + res &= ply_add_scalar_property(ply, "nx", PLY_FLOAT); + res &= ply_add_scalar_property(ply, "ny", PLY_FLOAT); + res &= ply_add_scalar_property(ply, "nz", PLY_FLOAT); + } + + if(use_color){ + res &= ply_add_scalar_property(ply, "red", PLY_UCHAR); + res &= ply_add_scalar_property(ply, "green", PLY_UCHAR); + res &= ply_add_scalar_property(ply, "blue", PLY_UCHAR); + } + + res &= ply_add_element(ply, "face", PyList_Size(tri)); + res &= ply_add_list_property(ply, "vertex_indices", PLY_UCHAR, PLY_INT); + + res &= ply_write_header(ply); + if (!res) { + PyErr_SetString(PlyutilsError, "Failed to write header."); + return NULL; + } + + + + for (ii = 0; ii < PyList_Size(pts); ++ii) { + row = PyList_GetItem(pts, ii); + res &= ply_write(ply, PyFloat_AsDouble(PyList_GetItem(row, 0))); + res &= ply_write(ply, PyFloat_AsDouble(PyList_GetItem(row, 1))); + res &= ply_write(ply, PyFloat_AsDouble(PyList_GetItem(row, 2))); + if(use_normals){ + row = PyList_GetItem(normals, ii); + res &= ply_write(ply, PyFloat_AsDouble(PyList_GetItem(row, 0))); + res &= ply_write(ply, PyFloat_AsDouble(PyList_GetItem(row, 1))); + res &= ply_write(ply, PyFloat_AsDouble(PyList_GetItem(row, 2))); + } + if(use_color){ + row = PyList_GetItem(color, ii); + res &= ply_write(ply, (unsigned char)PyLong_AsUnsignedLongMask(PyList_GetItem(row, 0))); + res &= ply_write(ply, (unsigned char)PyLong_AsUnsignedLongMask(PyList_GetItem(row, 1))); + res &= ply_write(ply, (unsigned char)PyLong_AsUnsignedLongMask(PyList_GetItem(row, 2))); + } + } + if (!res) { + PyErr_SetString(PlyutilsError, "Error writing points."); + return NULL; + } + + for (ii = 0; ii < PyList_Size(tri); ++ii) { + row = PyList_GetItem(tri, ii); + res &= ply_write(ply, 3); + res &= ply_write(ply, PyFloat_AsDouble(PyList_GetItem(row, 0))); + res &= ply_write(ply, PyFloat_AsDouble(PyList_GetItem(row, 1))); + res &= ply_write(ply, PyFloat_AsDouble(PyList_GetItem(row, 2))); + } + if (!res) { + PyErr_SetString(PlyutilsError, "Error writing faces."); + return NULL; + } + + ply_close(ply); + Py_INCREF(Py_None); + return Py_None; +} + +int vertex_cb(p_ply_argument argument) { + void *p; PyObject* list; + long ii; + PyObject *val; + + ply_get_argument_element(argument, NULL, &ii); + ply_get_argument_user_data(argument, &p, NULL); + list = (PyObject*)(*(void**)p); + + val = PyFloat_FromDouble(ply_get_argument_value(argument)); + // PyList_Append(list, val); + // Py_DECREF(val); + PyList_SET_ITEM(list, ii, val); + + return 1; +} + +int face_cb(p_ply_argument argument) { + void *p; PyObject* tri; + long ii; + long length, value_index; + PyObject *val; + + ply_get_argument_element(argument, NULL, &ii); + ply_get_argument_user_data(argument, &p, NULL); + tri = (PyObject*)(*(void**)p); + + ply_get_argument_property(argument, NULL, &length, &value_index); + if (value_index >= 0 && value_index < PyList_Size(tri)) { + PyObject* slice = PyList_GetItem(tri, value_index); + + val = PyFloat_FromDouble(ply_get_argument_value(argument)); + // PyList_Append(slice, val); + // Py_DECREF(val); + PyList_SET_ITEM(slice, ii, val); + } + + return 1; +} + +void error_cb(const char *message) { + PyErr_SetString(PlyutilsError, message); +} diff --git a/mesh-master/mesh/src/plyutils.h b/mesh-master/mesh/src/plyutils.h new file mode 100644 index 0000000000000000000000000000000000000000..b6c07b96c374243844cbbad11a29575034f943ab --- /dev/null +++ b/mesh-master/mesh/src/plyutils.h @@ -0,0 +1,14 @@ +#ifndef PLYUTILS_H__ +#define PLYUTILS_H__ + +// needed 
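In Python, the module built from plyutils.c is driven roughly as below. The import path is an assumption (the moduleDef registers it as serialization.plyutils); write() takes points, faces, per-vertex colours, filename, an ascii flag, a little-endian flag, a comment list and optional normals, and read() returns plain Python lists: 'pts' is three parallel coordinate lists and 'tri' is three parallel index lists (the indices come back as floats, because face_cb stores them with PyFloat_FromDouble).

import plyutils  # assumed import name; the moduleDef registers "serialization.plyutils"

pts = [[0.0, 0.0, 0.0], [1.0, 0.0, 0.0], [0.0, 1.0, 0.0]]
tri = [[0, 1, 2]]
col = [[255, 0, 0], [0, 255, 0], [0, 0, 255]]   # same length as pts, so colour is written

# arguments: points, faces, colours, filename, ascii?, little_endian?, comments[, normals]
plyutils.write(pts, tri, col, "triangle.ply", True, True, [])

data = plyutils.read("triangle.ply")
xs, ys, zs = data["pts"]        # three parallel coordinate lists
i0, i1, i2 = data["tri"]        # three parallel vertex-index lists, one entry per face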
to avoid the link to debug "_d.lib" libraries +#include "hijack_python_headers.hpp" +#include "rply.h" + +static PyObject * plyutils_read(PyObject *self, PyObject *args); +static PyObject * plyutils_write(PyObject *self, PyObject *args); +void error_cb(const char *message); +int vertex_cb(p_ply_argument argument); +int face_cb(p_ply_argument argument); + +#endif /* PLYUTILS_H__ */ \ No newline at end of file diff --git a/mesh-master/mesh/src/py_loadobj.cpp b/mesh-master/mesh/src/py_loadobj.cpp new file mode 100644 index 0000000000000000000000000000000000000000..f28b14d7b1198b2b89e47b26e9e63ffefbc24356 --- /dev/null +++ b/mesh-master/mesh/src/py_loadobj.cpp @@ -0,0 +1,242 @@ + +// needed to avoid the link to debug "_d.lib" libraries +#include "hijack_python_headers.hpp" + +#include +#include +#include +#include +using boost::uint32_t; +using boost::array; + +#include +#include +#include +#include +#include +#include + +class LoadObjException: public std::exception { +public: + LoadObjException(std::string m="loadObjException!"):msg(m) {} + ~LoadObjException() throw() {} + const char* what() const throw() { return msg.c_str(); } +private: + std::string msg; +}; + +static PyObject * +loadobj(PyObject *self, PyObject *args, PyObject *keywds); + +static PyObject *LoadObjError; + +static PyMethodDef loadobj_methods[] = { + {"loadobj", (PyCFunction) loadobj, + METH_VARARGS | METH_KEYWORDS, "loadobj."}, + {NULL, NULL, 0, NULL} /* Sentinel */ +}; + +static struct PyModuleDef moduleDef = +{ + PyModuleDef_HEAD_INIT, + "serialization.loadobj", /* name of module */ + "", /* module documentation, may be NULL */ + -1, /* size of per-interpreter state of the module, or -1 if the module keeps state in global variables. */ + loadobj_methods +}; + +PyMODINIT_FUNC PyInit_loadobj(void) { + PyObject *m = PyModule_Create(&moduleDef); + if (m == NULL) + return NULL; + + import_array(); + LoadObjError = PyErr_NewException(const_cast("loadobj.LoadObjError"), NULL, NULL); + Py_INCREF(LoadObjError); + PyModule_AddObject(m, "LoadObjError", LoadObjError); + + return m; +} + +static PyObject * +loadobj(PyObject *self, PyObject *args, PyObject *keywds) +{ + try { + char py_objpatharr[256]; + char *py_objpath = static_cast(py_objpatharr); + + // a copy of the literal string is done into a (non const) char + char key1[] = "obj_path"; + static char* kwlist[] = {key1, NULL}; + + if (!PyArg_ParseTupleAndKeywords(args, keywds, "s", kwlist, &py_objpath)) + return NULL; + + std::ifstream obj_is(py_objpath,std::ios_base::binary | std::ios_base::in); + if (!obj_is) { + PyErr_SetString(PyExc_ValueError, "Could not load file"); + return NULL; + } + + std::vector v; + std::vector vt; + std::vector vn; + std::vector f; + std::vector ft; + std::vector fn; + v.reserve(30000); + vt.reserve(30000); + vn.reserve(30000); + f.reserve(100000); + ft.reserve(100000); + fn.reserve(100000); + std::map > segm; + + bool next_v_is_land = false; + std::string land_name(""); + std::map landm; + + std::string line; + std::string curr_segm(""); + std::string mtl_path(""); + unsigned len_vt = 3; + while (getline(obj_is, line)) { + if (line.substr(0,6) == "mtllib") { + mtl_path = line.substr(6); + } + + if (line.substr(0,1) == "g"){ + curr_segm = line.substr(2); + if (segm.find(curr_segm) == segm.end()) + segm[curr_segm] = std::vector(); + } + if (line.substr(0,2) == "vt"){ + std::istringstream is(line.substr(2)); + unsigned orig_vt_len = vt.size(); + std::copy(std::istream_iterator(is), + std::istream_iterator(), + std::back_inserter(vt)); + len_vt = 
vt.size() - orig_vt_len; + } + else if (line.substr(0,2) == "vn"){ + std::istringstream is(line.substr(2)); + std::copy(std::istream_iterator(is), + std::istream_iterator(), + std::back_inserter(vn)); + } + else if (line.substr(0,1) == "f"){ + std::istringstream is(line.substr(1)); + std::istream_iterator it(is); + const std::string delims("/"); + std::vector localf, localfn, localft; + for(;it!=std::istream_iterator();++it){ + // valid: v v/vt v/vt/vn v//vn + unsigned counter=0; + std::istringstream unparsed_face(*it); + std::string el; + while(std::getline(unparsed_face, el, '/')) { + if (el.size() > 0) { // if the element has contents + if (counter == 0) + localf.push_back(atoi(el.c_str())); + if (counter == 1) + localft.push_back(atoi(el.c_str())); + if (counter == 2) + localfn.push_back(atoi(el.c_str())); + } + counter++; + } + } + if (localf.size() > 0) { + for (int i=1; i<(localf.size()-1); ++i) { + f.push_back(localf[0] - 1); + f.push_back(localf[i] - 1); + f.push_back(localf[i+1] - 1); + if (curr_segm != "") + segm.find(curr_segm)->second.push_back((f.size()/3)-1); + } + } + if (localft.size() > 0) { + for (int i=1; i<(localft.size()-1); ++i){ + ft.push_back(localft[0] - 1); + ft.push_back(localft[i] - 1); + ft.push_back(localft[i+1] - 1); + } + } + if (localfn.size() > 0) { + for (int i=1; i<(localfn.size()-1); ++i){ + fn.push_back(localfn[0] - 1); + fn.push_back(localfn[i] - 1); + fn.push_back(localfn[i+1] - 1); + } + } + } + else if (line.substr(0,1) == "v"){ + std::istringstream is(line.substr(1)); + std::copy(std::istream_iterator(is), + std::istream_iterator(), + std::back_inserter(v)); + if (next_v_is_land) { + next_v_is_land = false; + landm[land_name.c_str()] = v.size()/3-1; + } + } + else if (line.substr(0,9) == "#landmark"){ + next_v_is_land = true; + land_name = line.substr(10); + } + } + + unsigned n_v = v.size()/3; + unsigned n_vt = vt.size()/len_vt; + unsigned n_vn = vn.size()/3; + unsigned n_f = f.size()/3; + unsigned n_ft = ft.size()/3; + unsigned n_fn = fn.size()/3; + npy_intp v_dims[] = {n_v,3}; + npy_intp vn_dims[] = {n_vn,3}; + npy_intp vt_dims[] = {n_vt,len_vt}; + npy_intp f_dims[] = {n_f,3}; + npy_intp ft_dims[] = {n_ft,3}; + npy_intp fn_dims[] = {n_fn,3}; + /* + // XXX Memory from vectors get deallocated! 
+ PyObject *py_v = PyArray_SimpleNewFromData(2, v_dims, NPY_DOUBLE, v.data()); + PyObject *py_vt = PyArray_SimpleNewFromData(2, vt_dims, NPY_DOUBLE, vt.data()); + PyObject *py_vn = PyArray_SimpleNewFromData(2, vn_dims, NPY_DOUBLE, vn.data()); + PyObject *py_f = PyArray_SimpleNewFromData(2, f_dims, NPY_UINT32, f.data()); + PyObject *py_ft = PyArray_SimpleNewFromData(2, ft_dims, NPY_UINT32, ft.data()); + PyObject *py_fn = PyArray_SimpleNewFromData(2, fn_dims, NPY_UINT32, fn.data()); + */ + // The following copy would be faster in C++11 with move semantics + PyObject *py_v = PyArray_SimpleNew(2, v_dims, NPY_DOUBLE); + std::copy(v.begin(), v.end(), reinterpret_cast(PyArray_DATA(py_v))); + PyObject *py_vt = PyArray_SimpleNew(2, vt_dims, NPY_DOUBLE); + std::copy(vt.begin(), vt.end(), reinterpret_cast(PyArray_DATA(py_vt))); + PyObject *py_vn = PyArray_SimpleNew(2, vn_dims, NPY_DOUBLE); + std::copy(vn.begin(), vn.end(), reinterpret_cast(PyArray_DATA(py_vn))); + PyObject *py_f = PyArray_SimpleNew(2, f_dims, NPY_UINT32); + std::copy(f.begin(), f.end(), reinterpret_cast(PyArray_DATA(py_f))); + PyObject *py_ft = PyArray_SimpleNew(2, ft_dims, NPY_UINT32); + std::copy(ft.begin(), ft.end(), reinterpret_cast(PyArray_DATA(py_ft))); + PyObject *py_fn = PyArray_SimpleNew(2, fn_dims, NPY_UINT32); + std::copy(fn.begin(), fn.end(), reinterpret_cast(PyArray_DATA(py_fn))); + + PyObject *py_landm = PyDict_New(); + for (std::map::iterator it=landm.begin(); it!=landm.end(); ++it) + PyDict_SetItemString(py_landm, it->first.c_str(), Py_BuildValue("l", it->second)); + + PyObject *py_segm = PyDict_New(); + for (std::map >::iterator it=segm.begin(); it!=segm.end(); ++it) { + unsigned n = it->second.size(); + npy_intp dims[] = {n}; + PyObject *temp = PyArray_SimpleNew(1, dims, NPY_UINT32); + std::copy(it->second.begin(), it->second.end(), reinterpret_cast(PyArray_DATA(temp))); + PyDict_SetItemString(py_segm, it->first.c_str(), Py_BuildValue("N", temp)); + } + + return Py_BuildValue("NNNNNNsNN",py_v,py_vt,py_vn,py_f,py_ft,py_fn,mtl_path.c_str(),py_landm,py_segm); + } catch (LoadObjException& e) { + PyErr_SetString(LoadObjError, e.what()); + return NULL; + } +} diff --git a/mesh-master/mesh/src/py_visibility.cpp b/mesh-master/mesh/src/py_visibility.cpp new file mode 100644 index 0000000000000000000000000000000000000000..6ffd4da09f1e0921e8cf37e39cd110854aca1556 --- /dev/null +++ b/mesh-master/mesh/src/py_visibility.cpp @@ -0,0 +1,212 @@ +// Copyright (c) 2018 Max Planck Society for non-commercial scientific research +// This file is part of psbody.mesh project which is released under MPI License. +// See file LICENSE.txt for full license details. + +#include +#include +#include +#include +#include "nearest_triangle.hpp" +using boost::uint32_t; + +#include "cgal_error_emulation.hpp" +#include "visibility.h" + + +static PyObject * +visibility_compute(PyObject *self, PyObject *args, PyObject *keywds); + +static PyObject *VisibilityError; + +static PyMethodDef visibility_methods[] = { + {"visibility_compute", + (PyCFunction)visibility_compute, + METH_VARARGS | METH_KEYWORDS, + "visibility_compute."}, + {NULL, NULL, 0, NULL} /* Sentinel */ +}; + + +PyMODINIT_FUNC PyInit_visibility(void) +{ + PyObject *m; + + /// Static module-definition table + static PyModuleDef moduledef = { + PyModuleDef_HEAD_INIT, + "psbody.mesh.visibility", /* name of module */ + "", /* module documentation, may be NULL */ + -1, /* size of per-interpreter state of the module, or -1 if the module keeps state in global variables. 
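The loader above accepts v/vt/vn/f records, fan-triangulates polygonal faces (a face a b c d becomes the triangles (a,b,c) and (a,c,d)), collects 'g' groups into segm and '#landmark'-tagged vertices into landm, and hands back nine values. A usage sketch, with the bare import name loadobj assumed (the moduleDef registers serialization.loadobj):

import loadobj  # assumed import name; registered as "serialization.loadobj"

with open("quad.obj", "w") as fh:
    fh.write("v 0 0 0\nv 1 0 0\nv 1 1 0\nv 0 1 0\nf 1 2 3 4\n")

v, vt, vn, f, ft, fn, mtl_path, landm, segm = loadobj.loadobj(obj_path="quad.obj")
# v -> (4, 3) float64 vertices; f -> (2, 3) uint32 faces (the quad is split into two triangles)
# landm -> dict of '#landmark' vertex indices; segm -> dict mapping group name to face indices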
*/ + visibility_methods /* m_methods */ + }; + + /// Actually initialize the module object, + /// using the new Python 3 module-definition table + m = PyModule_Create(&moduledef); + + + import_array(); + VisibilityError = PyErr_NewException(const_cast("visibility.VisibilityError"), NULL, NULL); + Py_INCREF(VisibilityError); + PyModule_AddObject(m, "VisibilityError", VisibilityError); + + /// Initialize and check module + if (m == NULL) { return NULL; } + + /// Return module object + return m; +} + + +template +npy_intp parse_pyarray(const PyArrayObject *py_arr, const array* &cpp_arr){ + if (py_arr->descr->type_num != PYTYPE || py_arr->nd != 2) { + PyErr_SetString(PyExc_ValueError, + "Array must be of a specific type, and 2 dimensional"); + return NULL; + } + npy_intp* dims = PyArray_DIMS(py_arr); + if (dims[1] != 3) { + PyErr_SetString(PyExc_ValueError, "Array must be Nx3"); + return NULL; + } + CTYPE *c_arr = (CTYPE*)PyArray_DATA(py_arr); + cpp_arr = reinterpret_cast*>(c_arr); + return dims[0]; +} + +static PyObject * +visibility_compute(PyObject *self, PyObject *args, PyObject *keywds) +{ + try { + PyArrayObject *py_v=NULL, *py_f=NULL, *py_n=NULL, + *py_cams=NULL, *py_sensors=NULL, + *py_extra_v=NULL, *py_extra_f=NULL; + PyObject *py_tree=NULL; + double min_dist = 1e-3; + + static char* kwlist[] = {"cams","v","f","tree","n","sensors", + "extra_v", "extra_f", "min_dist", NULL}; + + if (!PyArg_ParseTupleAndKeywords(args, keywds, "O!|O!O!OO!O!O!O!d", kwlist, + &PyArray_Type, &py_cams, + &PyArray_Type, &py_v, + &PyArray_Type, &py_f, + &py_tree, + &PyArray_Type, &py_n, + &PyArray_Type, &py_sensors, + &PyArray_Type, &py_extra_v, + &PyArray_Type, &py_extra_f, + &min_dist)) + return NULL; + + bool use_sensors = (py_sensors != NULL); + bool use_extramesh = (py_extra_v != NULL && py_extra_f != NULL); + + TreeAndTri* search; + if(py_tree != NULL){ + search = (TreeAndTri *)PyCapsule_GetPointer(py_tree, NULL); + } + else{ + + const array* verts_arr; + const array* faces_arr; + + npy_intp nv = parse_pyarray(py_v, verts_arr); + npy_intp nf = parse_pyarray(py_f, faces_arr); + + search = new TreeAndTri; + search->points.reserve(nv); + for(size_t pp=0; pppoints.push_back(K::Point_3(verts_arr[pp][0], + verts_arr[pp][1], + verts_arr[pp][2])); + } + + search->triangles.reserve(nf); + for(size_t tt=0; tttriangles.push_back(K::Triangle_3(search->points[faces_arr[tt][0]], + search->points[faces_arr[tt][1]], + search->points[faces_arr[tt][2]])); + } + + if(use_extramesh){ + const array* verts_extra_arr; + const array* faces_extra_arr; + npy_intp nv_extra = parse_pyarray(py_extra_v, verts_extra_arr); + npy_intp nf_extra = parse_pyarray(py_extra_f, faces_extra_arr); + std::vector extrapoints; + + extrapoints.reserve(nv_extra); + for(size_t pp=0; pptriangles.reserve(nf+nf_extra); + for(size_t tt=0; tttriangles.push_back(K::Triangle_3(extrapoints[faces_extra_arr[tt][0]], + extrapoints[faces_extra_arr[tt][1]], + extrapoints[faces_extra_arr[tt][2]])); + } + } + + search->tree.rebuild(search->triangles.begin(), search->triangles.end()); + search->tree.accelerate_distance_queries(); + } + + if (py_cams->descr->type_num != NPY_DOUBLE || py_cams->nd != 2) { + PyErr_SetString(PyExc_ValueError, "Camera positions must be of type double, and 2 dimensional"); + return NULL; + } + + npy_intp* cam_dims = PyArray_DIMS(py_cams); + + if (cam_dims[1] != 3) { + PyErr_SetString(PyExc_ValueError, "Cams must be Nx3"); + return NULL; + } + + double *pN = NULL; + if (py_n != NULL){ + npy_intp* n_dims = PyArray_DIMS(py_n); + if (n_dims[1] 
!= 3 || n_dims[0] != search->points.size()) { + PyErr_SetString(PyExc_ValueError, "Normals should have same number of rows as vertices, and 3 columns"); + return NULL; + } + pN = (double*)PyArray_DATA(py_n); + } + + double *pSensors = NULL; + if (use_sensors){ + npy_intp* n_dims = PyArray_DIMS(py_sensors); + if (n_dims[1] != 9 || n_dims[0] != cam_dims[0]) { + PyErr_SetString(PyExc_ValueError, "Sensors should have same number of rows as cameras, 3x3 columns"); + return NULL; + } + pSensors = (double*)PyArray_DATA(py_sensors); + } + + double *pCams = (double*)PyArray_DATA(py_cams); + + size_t C = cam_dims[0]; + + npy_intp result_dims[] = {C,search->points.size()}; + PyObject *py_bin_visibility = PyArray_SimpleNew(2, result_dims, NPY_UINT32); + PyObject *py_normal_dot_cam = PyArray_SimpleNew(2, result_dims, NPY_DOUBLE); + uint32_t* visibility = reinterpret_cast(PyArray_DATA(py_bin_visibility)); + double* normal_dot_cam = reinterpret_cast(PyArray_DATA(py_normal_dot_cam)); + + _internal_compute(search, pN, pCams, C, use_sensors, + pSensors, min_dist, visibility, normal_dot_cam); + + // Cleaning and returning + delete search; + return Py_BuildValue("NN",py_bin_visibility, py_normal_dot_cam); + + } catch (VisibilityException& e) { + PyErr_SetString(VisibilityError, e.what()); + return NULL; + } +} diff --git a/mesh-master/mesh/src/rply.c b/mesh-master/mesh/src/rply.c new file mode 100644 index 0000000000000000000000000000000000000000..a05ef0cc8359efdbc4b2cdefbbd12e5b7eb1621a --- /dev/null +++ b/mesh-master/mesh/src/rply.c @@ -0,0 +1,1522 @@ +/* ---------------------------------------------------------------------- + * RPly library, read/write PLY files + * Diego Nehab, Princeton University + * http://www.cs.princeton.edu/~diego/professional/rply + * + * This library is distributed under the MIT License. See notice + * at the end of this file. 
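visibility_compute is keyword-driven: cams is mandatory, and the mesh is supplied either as v/f arrays (an AABB tree is then built on the fly) or as a prebuilt tree capsule; n, sensors, extra_v/extra_f and min_dist are optional. It returns one uint32 matrix and one double matrix of shape (num_cams, num_vertices), both filled in by _internal_compute (defined in visibility.h, not shown here). A rough sketch, assuming the extension is importable under the psbody.mesh package as suggested by the name in the moduledef:

import numpy as np
from psbody.mesh import visibility  # assumed package layout; the moduledef names it "psbody.mesh.visibility"

verts = np.array([[0., 0., 0.], [1., 0., 0.], [0., 1., 0.]])   # (N, 3) float64
faces = np.array([[0, 1, 2]], dtype=np.uint32)                  # (M, 3) uint32
normals = np.tile([0., 0., 1.], (3, 1))                          # one unit normal per vertex
cams = np.array([[0.2, 0.2, 5.0]])                               # (C, 3) camera centres

vis, n_dot_cam = visibility.visibility_compute(cams=cams, v=verts, f=faces, n=normals)
# vis       -> (1, 3) uint32 per-camera, per-vertex visibility flags
# n_dot_cam -> (1, 3) float64, the normal_dot_cam values reported alongside them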
+ * ---------------------------------------------------------------------- */ +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#ifdef _MSC_VER +typedef signed __int8 int8_t; +typedef signed __int16 int16_t; +typedef signed __int32 int32_t; +typedef signed __int64 int64_t; +typedef unsigned __int8 uint8_t; +typedef unsigned __int16 uint16_t; +typedef unsigned __int32 uint32_t; +typedef unsigned __int64 uint64_t; +#define INT8_MIN ((int8_t)_I8_MIN) +#define INT8_MAX _I8_MAX +#define INT16_MIN ((int16_t)_I16_MIN) +#define INT16_MAX _I16_MAX +#define INT32_MIN ((int32_t)_I32_MIN) +#define INT32_MAX _I32_MAX +#define INT64_MIN ((int64_t)_I64_MIN) +#define INT64_MAX _I64_MAX +#define UINT8_MAX _UI8_MAX +#define UINT16_MAX _UI16_MAX +#define UINT32_MAX _UI32_MAX +#define UINT64_MAX _UI64_MAX +#else +#include +#endif + +#include "rply.h" + +/* ---------------------------------------------------------------------- + * Constants + * ---------------------------------------------------------------------- */ +#define WORDSIZE 256 +#define LINESIZE 1024 +#define BUFFERSIZE (8*1024) + +typedef enum e_ply_io_mode_ { + PLY_READ, + PLY_WRITE +} e_ply_io_mode; + +static const char *const ply_storage_mode_list[] = { + "binary_big_endian", "binary_little_endian", "ascii", NULL +}; /* order matches e_ply_storage_mode enum */ + +static const char *const ply_type_list[] = { + "int8", "uint8", "int16", "uint16", + "int32", "uint32", "float32", "float64", + "char", "uchar", "short", "ushort", + "int", "uint", "float", "double", + "list", NULL +}; /* order matches e_ply_type enum */ + +/* ---------------------------------------------------------------------- + * Property reading callback argument + * + * element: name of element being processed + * property: name of property being processed + * nelements: number of elements of this kind in file + * instance_index: index current element of this kind being processed + * length: number of values in current list (or 1 for scalars) + * value_index: index of current value int this list (or 0 for scalars) + * value: value of property + * pdata/idata: user data defined with ply_set_cb + * + * Returns handle to ply file if succesful, NULL otherwise. + * ---------------------------------------------------------------------- */ +typedef struct t_ply_argument_ { + p_ply_element element; + long instance_index; + p_ply_property property; + long length, value_index; + double value; + void *pdata; + long idata; +} t_ply_argument; + +/* ---------------------------------------------------------------------- + * Property information + * + * name: name of this property + * type: type of this property (list or type of scalar value) + * length_type, value_type: type of list property count and values + * read_cb: function to be called when this property is called + * + * Returns 1 if should continue processing file, 0 if should abort. + * ---------------------------------------------------------------------- */ +typedef struct t_ply_property_ { + char name[WORDSIZE]; + e_ply_type type, value_type, length_type; + p_ply_read_cb read_cb; + void *pdata; + long idata; +} t_ply_property; + +/* ---------------------------------------------------------------------- + * Element information + * + * name: name of this property + * ninstances: number of elements of this type in file + * property: property descriptions for this element + * nproperty: number of properties in this element + * + * Returns 1 if should continue processing file, 0 if should abort. 
+ * ---------------------------------------------------------------------- */ +typedef struct t_ply_element_ { + char name[WORDSIZE]; + long ninstances; + p_ply_property property; + long nproperties; +} t_ply_element; + +/* ---------------------------------------------------------------------- + * Input/output driver + * + * Depending on file mode, different functions are used to read/write + * property fields. The drivers make it transparent to read/write in ascii, + * big endian or little endian cases. + * ---------------------------------------------------------------------- */ +typedef int (*p_ply_ihandler)(p_ply ply, double *value); +typedef int (*p_ply_ichunk)(p_ply ply, void *anydata, size_t size); +typedef struct t_ply_idriver_ { + p_ply_ihandler ihandler[16]; + p_ply_ichunk ichunk; + const char *name; +} t_ply_idriver; +typedef t_ply_idriver *p_ply_idriver; + +typedef int (*p_ply_ohandler)(p_ply ply, double value); +typedef int (*p_ply_ochunk)(p_ply ply, void *anydata, size_t size); +typedef struct t_ply_odriver_ { + p_ply_ohandler ohandler[16]; + p_ply_ochunk ochunk; + const char *name; +} t_ply_odriver; +typedef t_ply_odriver *p_ply_odriver; + +/* ---------------------------------------------------------------------- + * Ply file handle. + * + * io_mode: read or write (from e_ply_io_mode) + * storage_mode: mode of file associated with handle (from e_ply_storage_mode) + * element: elements description for this file + * nelement: number of different elements in file + * comment: comments for this file + * ncomments: number of comments in file + * obj_info: obj_info items for this file + * nobj_infos: number of obj_info items in file + * fp: file pointer associated with ply file + * c: last character read from ply file + * buffer: last word/chunck of data read from ply file + * buffer_first, buffer_last: interval of untouched good data in buffer + * buffer_token: start of parsed token (line or word) in buffer + * idriver, odriver: input driver used to get property fields from file + * argument: storage space for callback arguments + * welement, wproperty: element/property type being written + * winstance_index: index of instance of current element being written + * wvalue_index: index of list property value being written + * wlength: number of values in list property being written + * error_cb: callback for error messages + * ---------------------------------------------------------------------- */ +typedef struct t_ply_ { + e_ply_io_mode io_mode; + e_ply_storage_mode storage_mode; + p_ply_element element; + long nelements; + char *comment; + long ncomments; + char *obj_info; + long nobj_infos; + FILE *fp; + int c; + char buffer[BUFFERSIZE]; + size_t buffer_first, buffer_token, buffer_last; + p_ply_idriver idriver; + p_ply_odriver odriver; + t_ply_argument argument; + long welement, wproperty; + long winstance_index, wvalue_index, wlength; + p_ply_error_cb error_cb; +} t_ply; + +/* ---------------------------------------------------------------------- + * I/O functions and drivers + * ---------------------------------------------------------------------- */ +static t_ply_idriver ply_idriver_ascii; +static t_ply_idriver ply_idriver_binary; +static t_ply_idriver ply_idriver_binary_reverse; +static t_ply_odriver ply_odriver_ascii; +static t_ply_odriver ply_odriver_binary; +static t_ply_odriver ply_odriver_binary_reverse; + +static int ply_read_word(p_ply ply); +static int ply_check_word(p_ply ply); +static int ply_read_line(p_ply ply); +static int ply_check_line(p_ply ply); +static 
int ply_read_chunk(p_ply ply, void *anybuffer, size_t size); +static int ply_read_chunk_reverse(p_ply ply, void *anybuffer, size_t size); +static int ply_write_chunk(p_ply ply, void *anybuffer, size_t size); +static int ply_write_chunk_reverse(p_ply ply, void *anybuffer, size_t size); +static void ply_reverse(void *anydata, size_t size); + +/* ---------------------------------------------------------------------- + * String functions + * ---------------------------------------------------------------------- */ +static int ply_find_string(const char *item, const char* const list[]); +static p_ply_element ply_find_element(p_ply ply, const char *name); +static p_ply_property ply_find_property(p_ply_element element, + const char *name); + +/* ---------------------------------------------------------------------- + * Header parsing + * ---------------------------------------------------------------------- */ +static int ply_read_header_format(p_ply ply); +static int ply_read_header_comment(p_ply ply); +static int ply_read_header_obj_info(p_ply ply); +static int ply_read_header_property(p_ply ply); +static int ply_read_header_element(p_ply ply); + +/* ---------------------------------------------------------------------- + * Error handling + * ---------------------------------------------------------------------- */ +static void ply_error_cb(const char *message); +static void ply_error(p_ply ply, const char *fmt, ...); + +/* ---------------------------------------------------------------------- + * Memory allocation and initialization + * ---------------------------------------------------------------------- */ +static void ply_init(p_ply ply); +static void ply_element_init(p_ply_element element); +static void ply_property_init(p_ply_property property); +static p_ply ply_alloc(void); +static p_ply_element ply_grow_element(p_ply ply); +static p_ply_property ply_grow_property(p_ply ply, p_ply_element element); +static void *ply_grow_array(p_ply ply, void **pointer, long *nmemb, long size); + +/* ---------------------------------------------------------------------- + * Special functions + * ---------------------------------------------------------------------- */ +static e_ply_storage_mode ply_arch_endian(void); +static int ply_type_check(void); + +/* ---------------------------------------------------------------------- + * Auxiliary read functions + * ---------------------------------------------------------------------- */ +static int ply_read_element(p_ply ply, p_ply_element element, + p_ply_argument argument); +static int ply_read_property(p_ply ply, p_ply_element element, + p_ply_property property, p_ply_argument argument); +static int ply_read_list_property(p_ply ply, p_ply_element element, + p_ply_property property, p_ply_argument argument); +static int ply_read_scalar_property(p_ply ply, p_ply_element element, + p_ply_property property, p_ply_argument argument); + + +/* ---------------------------------------------------------------------- + * Buffer support functions + * ---------------------------------------------------------------------- */ +/* pointers to tokenized word and line in buffer */ +#define BWORD(p) (p->buffer + p->buffer_token) +#define BLINE(p) (p->buffer + p->buffer_token) + +/* pointer to start of untouched bytes in buffer */ +#define BFIRST(p) (p->buffer + p->buffer_first) + +/* number of bytes untouched in buffer */ +#define BSIZE(p) (p->buffer_last - p->buffer_first) + +/* consumes data from buffer */ +#define BSKIP(p, s) (p->buffer_first += s) + +/* refills the 
buffer */ +static int BREFILL(p_ply ply) { + /* move untouched data to beginning of buffer */ + size_t size = BSIZE(ply); + memmove(ply->buffer, BFIRST(ply), size); + ply->buffer_last = size; + ply->buffer_first = ply->buffer_token = 0; + /* fill remaining with new data */ + size = fread(ply->buffer+size, 1, BUFFERSIZE-size-1, ply->fp); + /* place sentinel so we can use str* functions with buffer */ + ply->buffer[BUFFERSIZE-1] = '\0'; + /* check if read failed */ + if (size <= 0) return 0; + /* increase size to account for new data */ + ply->buffer_last += size; + return 1; +} + +/* ---------------------------------------------------------------------- + * Exported functions + * ---------------------------------------------------------------------- */ +/* ---------------------------------------------------------------------- + * Read support functions + * ---------------------------------------------------------------------- */ +p_ply ply_open(const char *name, p_ply_error_cb error_cb) { + char magic[5] = " "; + FILE *fp = NULL; + p_ply ply = NULL; + if (error_cb == NULL) error_cb = ply_error_cb; + if (!ply_type_check()) { + error_cb("Incompatible type system"); + return NULL; + } + assert(name); + fp = fopen(name, "rb"); + if (!fp) { + error_cb("Unable to open file"); + return NULL; + } + if (fread(magic, 1, 4, fp) < 4) { + error_cb("Error reading from file"); + fclose(fp); + return NULL; + } + if (strcmp(magic, "ply\n")) { + fclose(fp); + error_cb("Not a PLY file. Expected magic number 'ply\\n'"); + return NULL; + } + ply = ply_alloc(); + if (!ply) { + error_cb("Out of memory"); + fclose(fp); + return NULL; + } + ply->fp = fp; + ply->io_mode = PLY_READ; + ply->error_cb = error_cb; + return ply; +} + +int ply_read_header(p_ply ply) { + assert(ply && ply->fp && ply->io_mode == PLY_READ); + if (!ply_read_word(ply)) return 0; + /* parse file format */ + if (!ply_read_header_format(ply)) { + ply_error(ply, "Invalid file format"); + return 0; + } + /* parse elements, comments or obj_infos until the end of header */ + while (strcmp(BWORD(ply), "end_header")) { + if (!ply_read_header_comment(ply) && + !ply_read_header_element(ply) && + !ply_read_header_obj_info(ply)) { + ply_error(ply, "Unexpected token '%s'", BWORD(ply)); + return 0; + } + } + return 1; +} + +long ply_set_read_cb(p_ply ply, const char *element_name, + const char* property_name, p_ply_read_cb read_cb, + void *pdata, long idata) { + p_ply_element element = NULL; + p_ply_property property = NULL; + assert(ply && element_name && property_name); + element = ply_find_element(ply, element_name); + if (!element) return 0; + property = ply_find_property(element, property_name); + if (!property) return 0; + property->read_cb = read_cb; + property->pdata = pdata; + property->idata = idata; + return (int) element->ninstances; +} + +int ply_read(p_ply ply) { + long i; + p_ply_argument argument; + assert(ply && ply->fp && ply->io_mode == PLY_READ); + argument = &ply->argument; + /* for each element type */ + for (i = 0; i < ply->nelements; i++) { + p_ply_element element = &ply->element[i]; + argument->element = element; + if (!ply_read_element(ply, element, argument)) + return 0; + } + return 1; +} + +/* ---------------------------------------------------------------------- + * Write support functions + * ---------------------------------------------------------------------- */ +p_ply ply_create(const char *name, e_ply_storage_mode storage_mode, + p_ply_error_cb error_cb) { + FILE *fp = NULL; + p_ply ply = NULL; + if (error_cb == NULL) 
error_cb = ply_error_cb; + if (!ply_type_check()) { + error_cb("Incompatible type system"); + return NULL; + } + assert(name && storage_mode <= PLY_DEFAULT); + fp = fopen(name, "wb"); + if (!fp) { + error_cb("Unable to create file"); + return NULL; + } + ply = ply_alloc(); + if (!ply) { + fclose(fp); + error_cb("Out of memory"); + return NULL; + } + ply->io_mode = PLY_WRITE; + if (storage_mode == PLY_DEFAULT) storage_mode = ply_arch_endian(); + if (storage_mode == PLY_ASCII) ply->odriver = &ply_odriver_ascii; + else if (storage_mode == ply_arch_endian()) + ply->odriver = &ply_odriver_binary; + else ply->odriver = &ply_odriver_binary_reverse; + ply->storage_mode = storage_mode; + ply->fp = fp; + ply->error_cb = error_cb; + return ply; +} + +int ply_add_element(p_ply ply, const char *name, long ninstances) { + p_ply_element element = NULL; + assert(ply && ply->fp && ply->io_mode == PLY_WRITE); + assert(name && strlen(name) < WORDSIZE && ninstances >= 0); + if (strlen(name) >= WORDSIZE || ninstances < 0) { + ply_error(ply, "Invalid arguments"); + return 0; + } + element = ply_grow_element(ply); + if (!element) return 0; + strcpy(element->name, name); + element->ninstances = ninstances; + return 1; +} + +int ply_add_scalar_property(p_ply ply, const char *name, e_ply_type type) { + p_ply_element element = NULL; + p_ply_property property = NULL; + assert(ply && ply->fp && ply->io_mode == PLY_WRITE); + assert(name && strlen(name) < WORDSIZE); + assert(type < PLY_LIST); + if (strlen(name) >= WORDSIZE || type >= PLY_LIST) { + ply_error(ply, "Invalid arguments"); + return 0; + } + element = &ply->element[ply->nelements-1]; + property = ply_grow_property(ply, element); + if (!property) return 0; + strcpy(property->name, name); + property->type = type; + return 1; +} + +int ply_add_list_property(p_ply ply, const char *name, + e_ply_type length_type, e_ply_type value_type) { + p_ply_element element = NULL; + p_ply_property property = NULL; + assert(ply && ply->fp && ply->io_mode == PLY_WRITE); + assert(name && strlen(name) < WORDSIZE); + if (strlen(name) >= WORDSIZE) { + ply_error(ply, "Invalid arguments"); + return 0; + } + assert(length_type < PLY_LIST); + assert(value_type < PLY_LIST); + if (length_type >= PLY_LIST || value_type >= PLY_LIST) { + ply_error(ply, "Invalid arguments"); + return 0; + } + element = &ply->element[ply->nelements-1]; + property = ply_grow_property(ply, element); + if (!property) return 0; + strcpy(property->name, name); + property->type = PLY_LIST; + property->length_type = length_type; + property->value_type = value_type; + return 1; +} + +int ply_add_property(p_ply ply, const char *name, e_ply_type type, + e_ply_type length_type, e_ply_type value_type) { + if (type == PLY_LIST) + return ply_add_list_property(ply, name, length_type, value_type); + else + return ply_add_scalar_property(ply, name, type); +} + +int ply_add_comment(p_ply ply, const char *comment) { + char *new_comment = NULL; + assert(ply && comment && strlen(comment) < LINESIZE); + if (!comment || strlen(comment) >= LINESIZE) { + ply_error(ply, "Invalid arguments"); + return 0; + } + new_comment = (char *) ply_grow_array(ply, (void **) &ply->comment, + &ply->ncomments, LINESIZE); + if (!new_comment) return 0; + strcpy(new_comment, comment); + return 1; +} + +int ply_add_obj_info(p_ply ply, const char *obj_info) { + char *new_obj_info = NULL; + assert(ply && obj_info && strlen(obj_info) < LINESIZE); + if (!obj_info || strlen(obj_info) >= LINESIZE) { + ply_error(ply, "Invalid arguments"); + return 0; + } + 
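+    /* comments and obj_info strings are stored back to back in a single
+       growing array of fixed LINESIZE slots; ply_grow_array appends one slot
+       and returns a pointer to it, which is then filled with strcpy */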
new_obj_info = (char *) ply_grow_array(ply, (void **) &ply->obj_info, + &ply->nobj_infos, LINESIZE); + if (!new_obj_info) return 0; + strcpy(new_obj_info, obj_info); + return 1; +} + +int ply_write_header(p_ply ply) { + long i, j; + assert(ply && ply->fp && ply->io_mode == PLY_WRITE); + assert(ply->element || ply->nelements == 0); + assert(!ply->element || ply->nelements > 0); + if (fprintf(ply->fp, "ply\nformat %s 1.0\n", + ply_storage_mode_list[ply->storage_mode]) <= 0) goto error; + for (i = 0; i < ply->ncomments; i++) + if (fprintf(ply->fp, "comment %s\n", ply->comment + LINESIZE*i) <= 0) + goto error; + for (i = 0; i < ply->nobj_infos; i++) + if (fprintf(ply->fp, "obj_info %s\n", ply->obj_info + LINESIZE*i) <= 0) + goto error; + for (i = 0; i < ply->nelements; i++) { + p_ply_element element = &ply->element[i]; + assert(element->property || element->nproperties == 0); + assert(!element->property || element->nproperties > 0); + if (fprintf(ply->fp, "element %s %ld\n", element->name, + element->ninstances) <= 0) goto error; + for (j = 0; j < element->nproperties; j++) { + p_ply_property property = &element->property[j]; + if (property->type == PLY_LIST) { + if (fprintf(ply->fp, "property list %s %s %s\n", + ply_type_list[property->length_type], + ply_type_list[property->value_type], + property->name) <= 0) goto error; + } else { + if (fprintf(ply->fp, "property %s %s\n", + ply_type_list[property->type], + property->name) <= 0) goto error; + } + } + } + return fprintf(ply->fp, "end_header\n") > 0; +error: + ply_error(ply, "Error writing to file"); + return 0; +} + +int ply_write(p_ply ply, double value) { + p_ply_element element = NULL; + p_ply_property property = NULL; + int type = -1; + int breakafter = 0; + if (ply->welement > ply->nelements) return 0; + element = &ply->element[ply->welement]; + if (ply->wproperty > element->nproperties) return 0; + property = &element->property[ply->wproperty]; + if (property->type == PLY_LIST) { + if (ply->wvalue_index == 0) { + type = property->length_type; + ply->wlength = (int32_t) value; + } else type = property->value_type; + } else { + type = property->type; + ply->wlength = 0; + } + if (!ply->odriver->ohandler[type](ply, value)) { + ply_error(ply, "Failed writing %s of %s %d (%s: %s)", + property->name, element->name, + ply->winstance_index, + ply->odriver->name, ply_type_list[type]); + return 0; + } + ply->wvalue_index++; + if (ply->wvalue_index > ply->wlength) { + ply->wvalue_index = 0; + ply->wproperty++; + } + if (ply->wproperty >= element->nproperties) { + ply->wproperty = 0; + ply->winstance_index++; + if (ply->storage_mode == PLY_ASCII) breakafter = 1; + } + if (ply->winstance_index >= element->ninstances) { + ply->winstance_index = 0; + ply->welement++; + } + return !breakafter || putc('\n', ply->fp) > 0; +} + +int ply_close(p_ply ply) { + long i; + assert(ply && ply->fp); + assert(ply->element || ply->nelements == 0); + assert(!ply->element || ply->nelements > 0); + /* write last chunk to file */ + if (ply->io_mode == PLY_WRITE && + fwrite(ply->buffer, 1, ply->buffer_last, ply->fp) < ply->buffer_last) { + ply_error(ply, "Error closing up"); + return 0; + } + fclose(ply->fp); + /* free all memory used by handle */ + if (ply->element) { + for (i = 0; i < ply->nelements; i++) { + p_ply_element element = &ply->element[i]; + if (element->property) free(element->property); + } + free(ply->element); + } + if (ply->obj_info) free(ply->obj_info); + if (ply->comment) free(ply->comment); + free(ply); + return 1; +} + +/* 
---------------------------------------------------------------------- + * Query support functions + * ---------------------------------------------------------------------- */ +p_ply_element ply_get_next_element(p_ply ply, + p_ply_element last) { + assert(ply); + if (!last) return ply->element; + last++; + if (last < ply->element + ply->nelements) return last; + else return NULL; +} + +int ply_get_element_info(p_ply_element element, const char** name, + long *ninstances) { + assert(element); + if (name) *name = element->name; + if (ninstances) *ninstances = (long) element->ninstances; + return 1; +} + +p_ply_property ply_get_next_property(p_ply_element element, + p_ply_property last) { + assert(element); + if (!last) return element->property; + last++; + if (last < element->property + element->nproperties) return last; + else return NULL; +} + +int ply_get_property_info(p_ply_property property, const char** name, + e_ply_type *type, e_ply_type *length_type, e_ply_type *value_type) { + assert(property); + if (name) *name = property->name; + if (type) *type = property->type; + if (length_type) *length_type = property->length_type; + if (value_type) *value_type = property->value_type; + return 1; + +} + +const char *ply_get_next_comment(p_ply ply, const char *last) { + assert(ply); + if (!last) return ply->comment; + last += LINESIZE; + if (last < ply->comment + LINESIZE*ply->ncomments) return last; + else return NULL; +} + +const char *ply_get_next_obj_info(p_ply ply, const char *last) { + assert(ply); + if (!last) return ply->obj_info; + last += LINESIZE; + if (last < ply->obj_info + LINESIZE*ply->nobj_infos) return last; + else return NULL; +} + +/* ---------------------------------------------------------------------- + * Callback argument support functions + * ---------------------------------------------------------------------- */ +int ply_get_argument_element(p_ply_argument argument, + p_ply_element *element, long *instance_index) { + assert(argument); + if (!argument) return 0; + if (element) *element = argument->element; + if (instance_index) *instance_index = argument->instance_index; + return 1; +} + +int ply_get_argument_property(p_ply_argument argument, + p_ply_property *property, long *length, long *value_index) { + assert(argument); + if (!argument) return 0; + if (property) *property = argument->property; + if (length) *length = argument->length; + if (value_index) *value_index = argument->value_index; + return 1; +} + +int ply_get_argument_user_data(p_ply_argument argument, void **pdata, + long *idata) { + assert(argument); + if (!argument) return 0; + if (pdata) *pdata = argument->pdata; + if (idata) *idata = argument->idata; + return 1; +} + +double ply_get_argument_value(p_ply_argument argument) { + assert(argument); + if (!argument) return 0.0; + return argument->value; +} + +/* ---------------------------------------------------------------------- + * Internal functions + * ---------------------------------------------------------------------- */ +static int ply_read_list_property(p_ply ply, p_ply_element element, + p_ply_property property, p_ply_argument argument) { + int l; + p_ply_read_cb read_cb = property->read_cb; + p_ply_ihandler *driver = ply->idriver->ihandler; + /* get list length */ + p_ply_ihandler handler = driver[property->length_type]; + double length; + if (!handler(ply, &length)) { + ply_error(ply, "Error reading '%s' of '%s' number %d", + property->name, element->name, argument->instance_index); + return 0; + } + /* invoke callback to pass length in 
value field */ + argument->length = (long) length; + argument->value_index = -1; + argument->value = length; + if (read_cb && !read_cb(argument)) { + ply_error(ply, "Aborted by user"); + return 0; + } + /* read list values */ + handler = driver[property->value_type]; + /* for each value in list */ + for (l = 0; l < (long) length; l++) { + /* read value from file */ + argument->value_index = l; + if (!handler(ply, &argument->value)) { + ply_error(ply, "Error reading value number %d of '%s' of " + "'%s' number %d", l+1, property->name, + element->name, argument->instance_index); + return 0; + } + /* invoke callback to pass value */ + if (read_cb && !read_cb(argument)) { + ply_error(ply, "Aborted by user"); + return 0; + } + } + return 1; +} + +static int ply_read_scalar_property(p_ply ply, p_ply_element element, + p_ply_property property, p_ply_argument argument) { + p_ply_read_cb read_cb = property->read_cb; + p_ply_ihandler *driver = ply->idriver->ihandler; + p_ply_ihandler handler = driver[property->type]; + argument->length = 1; + argument->value_index = 0; + if (!handler(ply, &argument->value)) { + ply_error(ply, "Error reading '%s' of '%s' number %d", + property->name, element->name, argument->instance_index); + return 0; + } + if (read_cb && !read_cb(argument)) { + ply_error(ply, "Aborted by user"); + return 0; + } + return 1; +} + +static int ply_read_property(p_ply ply, p_ply_element element, + p_ply_property property, p_ply_argument argument) { + if (property->type == PLY_LIST) + return ply_read_list_property(ply, element, property, argument); + else + return ply_read_scalar_property(ply, element, property, argument); +} + +static int ply_read_element(p_ply ply, p_ply_element element, + p_ply_argument argument) { + long j, k; + /* for each element of this type */ + for (j = 0; j < element->ninstances; j++) { + argument->instance_index = j; + /* for each property */ + for (k = 0; k < element->nproperties; k++) { + p_ply_property property = &element->property[k]; + argument->property = property; + argument->pdata = property->pdata; + argument->idata = property->idata; + if (!ply_read_property(ply, element, property, argument)) + return 0; + } + } + return 1; +} + +static int ply_find_string(const char *item, const char* const list[]) { + int i; + assert(item && list); + for (i = 0; list[i]; i++) + if (!strcmp(list[i], item)) return i; + return -1; +} + +static p_ply_element ply_find_element(p_ply ply, const char *name) { + p_ply_element element; + long i, nelements; + assert(ply && name); + element = ply->element; + nelements = ply->nelements; + assert(element || nelements == 0); + assert(!element || nelements > 0); + for (i = 0; i < nelements; i++) + if (!strcmp(element[i].name, name)) return &element[i]; + return NULL; +} + +static p_ply_property ply_find_property(p_ply_element element, + const char *name) { + p_ply_property property; + long i, nproperties; + assert(element && name); + property = element->property; + nproperties = element->nproperties; + assert(property || nproperties == 0); + assert(!property || nproperties > 0); + for (i = 0; i < nproperties; i++) + if (!strcmp(property[i].name, name)) return &property[i]; + return NULL; +} + +static int ply_check_word(p_ply ply) { + if (strlen(BLINE(ply)) >= WORDSIZE) { + ply_error(ply, "Word too long"); + return 0; + } + return 1; +} + +static int ply_read_word(p_ply ply) { + size_t t = 0; + assert(ply && ply->fp && ply->io_mode == PLY_READ); + /* skip leading blanks */ + while (1) { + t = strspn(BFIRST(ply), " \n\r\t"); + /* 
check if all buffer was made of blanks */ + if (t >= BSIZE(ply)) { + if (!BREFILL(ply)) { + ply_error(ply, "Unexpected end of file"); + return 0; + } + } else break; + } + BSKIP(ply, t); + /* look for a space after the current word */ + t = strcspn(BFIRST(ply), " \n\r\t"); + /* if we didn't reach the end of the buffer, we are done */ + if (t < BSIZE(ply)) { + ply->buffer_token = ply->buffer_first; + BSKIP(ply, t); + *BFIRST(ply) = '\0'; + BSKIP(ply, 1); + return ply_check_word(ply); + } + /* otherwise, try to refill buffer */ + if (!BREFILL(ply)) { + ply_error(ply, "Unexpected end of file"); + return 0; + } + /* keep looking from where we left */ + t += strcspn(BFIRST(ply) + t, " \n\r\t"); + /* check if the token is too large for our buffer */ + if (t >= BSIZE(ply)) { + ply_error(ply, "Token too large"); + return 0; + } + /* we are done */ + ply->buffer_token = ply->buffer_first; + BSKIP(ply, t); + *BFIRST(ply) = '\0'; + BSKIP(ply, 1); + return ply_check_word(ply); +} + +static int ply_check_line(p_ply ply) { + if (strlen(BLINE(ply)) >= LINESIZE) { + ply_error(ply, "Line too long"); + return 0; + } + return 1; +} + +static int ply_read_line(p_ply ply) { + const char *end = NULL; + assert(ply && ply->fp && ply->io_mode == PLY_READ); + /* look for a end of line */ + end = strchr(BFIRST(ply), '\n'); + /* if we didn't reach the end of the buffer, we are done */ + if (end) { + ply->buffer_token = ply->buffer_first; + BSKIP(ply, end - BFIRST(ply)); + *BFIRST(ply) = '\0'; + BSKIP(ply, 1); + return ply_check_line(ply); + } else { + end = ply->buffer + BSIZE(ply); + /* otherwise, try to refill buffer */ + if (!BREFILL(ply)) { + ply_error(ply, "Unexpected end of file"); + return 0; + } + } + /* keep looking from where we left */ + end = strchr(end, '\n'); + /* check if the token is too large for our buffer */ + if (!end) { + ply_error(ply, "Token too large"); + return 0; + } + /* we are done */ + ply->buffer_token = ply->buffer_first; + BSKIP(ply, end - BFIRST(ply)); + *BFIRST(ply) = '\0'; + BSKIP(ply, 1); + return ply_check_line(ply); +} + +static int ply_read_chunk(p_ply ply, void *anybuffer, size_t size) { + char *buffer = (char *) anybuffer; + size_t i = 0; + assert(ply && ply->fp && ply->io_mode == PLY_READ); + assert(ply->buffer_first <= ply->buffer_last); + while (i < size) { + if (ply->buffer_first < ply->buffer_last) { + buffer[i] = ply->buffer[ply->buffer_first]; + ply->buffer_first++; + i++; + } else { + ply->buffer_first = 0; + ply->buffer_last = fread(ply->buffer, 1, BUFFERSIZE, ply->fp); + if (ply->buffer_last <= 0) return 0; + } + } + return 1; +} + +static int ply_write_chunk(p_ply ply, void *anybuffer, size_t size) { + char *buffer = (char *) anybuffer; + size_t i = 0; + assert(ply && ply->fp && ply->io_mode == PLY_WRITE); + assert(ply->buffer_last <= BUFFERSIZE); + while (i < size) { + if (ply->buffer_last < BUFFERSIZE) { + ply->buffer[ply->buffer_last] = buffer[i]; + ply->buffer_last++; + i++; + } else { + ply->buffer_last = 0; + if (fwrite(ply->buffer, 1, BUFFERSIZE, ply->fp) < BUFFERSIZE) + return 0; + } + } + return 1; +} + +static int ply_write_chunk_reverse(p_ply ply, void *anybuffer, size_t size) { + int ret = 0; + ply_reverse(anybuffer, size); + ret = ply_write_chunk(ply, anybuffer, size); + ply_reverse(anybuffer, size); + return ret; +} + +static int ply_read_chunk_reverse(p_ply ply, void *anybuffer, size_t size) { + if (!ply_read_chunk(ply, anybuffer, size)) return 0; + ply_reverse(anybuffer, size); + return 1; +} + +static void ply_reverse(void *anydata, size_t size) { + 
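+    /* in-place byte swap; used by ply_read_chunk_reverse and
+       ply_write_chunk_reverse to convert between big-endian and
+       little-endian storage modes */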
char *data = (char *) anydata; + char temp; + size_t i; + for (i = 0; i < size/2; i++) { + temp = data[i]; + data[i] = data[size-i-1]; + data[size-i-1] = temp; + } +} + +static void ply_init(p_ply ply) { + ply->c = ' '; + ply->element = NULL; + ply->nelements = 0; + ply->comment = NULL; + ply->ncomments = 0; + ply->obj_info = NULL; + ply->nobj_infos = 0; + ply->idriver = NULL; + ply->odriver = NULL; + ply->buffer[0] = '\0'; + ply->buffer_first = ply->buffer_last = ply->buffer_token = 0; + ply->welement = 0; + ply->wproperty = 0; + ply->winstance_index = 0; + ply->wlength = 0; + ply->wvalue_index = 0; +} + +static void ply_element_init(p_ply_element element) { + element->name[0] = '\0'; + element->ninstances = 0; + element->property = NULL; + element->nproperties = 0; +} + +static void ply_property_init(p_ply_property property) { + property->name[0] = '\0'; + property->type = -1; + property->length_type = -1; + property->value_type = -1; + property->read_cb = (p_ply_read_cb) NULL; + property->pdata = NULL; + property->idata = 0; +} + +static p_ply ply_alloc(void) { + p_ply ply = (p_ply) malloc(sizeof(t_ply)); + if (!ply) return NULL; + ply_init(ply); + return ply; +} + +static void *ply_grow_array(p_ply ply, void **pointer, + long *nmemb, long size) { + void *temp = *pointer; + long count = *nmemb + 1; + if (!temp) temp = malloc(count*size); + else temp = realloc(temp, count*size); + if (!temp) { + ply_error(ply, "Out of memory"); + return NULL; + } + *pointer = temp; + *nmemb = count; + return (char *) temp + (count-1) * size; +} + +static p_ply_element ply_grow_element(p_ply ply) { + p_ply_element element = NULL; + assert(ply); + assert(ply->element || ply->nelements == 0); + assert(!ply->element || ply->nelements > 0); + element = (p_ply_element) ply_grow_array(ply, (void **) &ply->element, + &ply->nelements, sizeof(t_ply_element)); + if (!element) return NULL; + ply_element_init(element); + return element; +} + +static p_ply_property ply_grow_property(p_ply ply, p_ply_element element) { + p_ply_property property = NULL; + assert(ply); + assert(element); + assert(element->property || element->nproperties == 0); + assert(!element->property || element->nproperties > 0); + property = (p_ply_property) ply_grow_array(ply, + (void **) &element->property, + &element->nproperties, sizeof(t_ply_property)); + if (!property) return NULL; + ply_property_init(property); + return property; +} + +static int ply_read_header_format(p_ply ply) { + assert(ply && ply->fp && ply->io_mode == PLY_READ); + if (strcmp(BWORD(ply), "format")) return 0; + if (!ply_read_word(ply)) return 0; + ply->storage_mode = ply_find_string(BWORD(ply), ply_storage_mode_list); + if (ply->storage_mode == (e_ply_storage_mode) (-1)) return 0; + if (ply->storage_mode == PLY_ASCII) ply->idriver = &ply_idriver_ascii; + else if (ply->storage_mode == ply_arch_endian()) + ply->idriver = &ply_idriver_binary; + else ply->idriver = &ply_idriver_binary_reverse; + if (!ply_read_word(ply)) return 0; + if (strcmp(BWORD(ply), "1.0")) return 0; + if (!ply_read_word(ply)) return 0; + return 1; +} + +static int ply_read_header_comment(p_ply ply) { + assert(ply && ply->fp && ply->io_mode == PLY_READ); + if (strcmp(BWORD(ply), "comment")) return 0; + if (!ply_read_line(ply)) return 0; + if (!ply_add_comment(ply, BLINE(ply))) return 0; + if (!ply_read_word(ply)) return 0; + return 1; +} + +static int ply_read_header_obj_info(p_ply ply) { + assert(ply && ply->fp && ply->io_mode == PLY_READ); + if (strcmp(BWORD(ply), "obj_info")) return 0; + if 
(!ply_read_line(ply)) return 0; + if (!ply_add_obj_info(ply, BLINE(ply))) return 0; + if (!ply_read_word(ply)) return 0; + return 1; +} + +static int ply_read_header_property(p_ply ply) { + p_ply_element element = NULL; + p_ply_property property = NULL; + /* make sure it is a property */ + if (strcmp(BWORD(ply), "property")) return 0; + element = &ply->element[ply->nelements-1]; + property = ply_grow_property(ply, element); + if (!property) return 0; + /* get property type */ + if (!ply_read_word(ply)) return 0; + property->type = ply_find_string(BWORD(ply), ply_type_list); + if (property->type == (e_ply_type) (-1)) return 0; + if (property->type == PLY_LIST) { + /* if it's a list, we need the base types */ + if (!ply_read_word(ply)) return 0; + property->length_type = ply_find_string(BWORD(ply), ply_type_list); + if (property->length_type == (e_ply_type) (-1)) return 0; + if (!ply_read_word(ply)) return 0; + property->value_type = ply_find_string(BWORD(ply), ply_type_list); + if (property->value_type == (e_ply_type) (-1)) return 0; + } + /* get property name */ + if (!ply_read_word(ply)) return 0; + strcpy(property->name, BWORD(ply)); + if (!ply_read_word(ply)) return 0; + return 1; +} + +static int ply_read_header_element(p_ply ply) { + p_ply_element element = NULL; + long dummy; + assert(ply && ply->fp && ply->io_mode == PLY_READ); + if (strcmp(BWORD(ply), "element")) return 0; + /* allocate room for new element */ + element = ply_grow_element(ply); + if (!element) return 0; + /* get element name */ + if (!ply_read_word(ply)) return 0; + strcpy(element->name, BWORD(ply)); + /* get number of elements of this type */ + if (!ply_read_word(ply)) return 0; + if (sscanf(BWORD(ply), "%ld", &dummy) != 1) { + ply_error(ply, "Expected number got '%s'", BWORD(ply)); + return 0; + } + element->ninstances = dummy; + /* get all properties for this element */ + if (!ply_read_word(ply)) return 0; + while (ply_read_header_property(ply) || + ply_read_header_comment(ply) || ply_read_header_obj_info(ply)) + /* do nothing */; + return 1; +} + +static void ply_error_cb(const char *message) { + fprintf(stderr, "RPly: %s\n", message); +} + +static void ply_error(p_ply ply, const char *fmt, ...) 
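+/* format the message into a fixed 1024-byte local buffer and forward it to
+   the user-supplied error callback; note that vsprintf performs no bounds
+   checking on that buffer */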
{ + char buffer[1024]; + va_list ap; + va_start(ap, fmt); + vsprintf(buffer, fmt, ap); + va_end(ap); + ply->error_cb(buffer); +} + +static e_ply_storage_mode ply_arch_endian(void) { + uint32_t i = 1; + unsigned char *s = (unsigned char *) &i; + if (*s == 1) return PLY_LITTLE_ENDIAN; + else return PLY_BIG_ENDIAN; +} + +static int ply_type_check(void) { + assert(sizeof(char) == 1); + assert(sizeof(unsigned char) == 1); + assert(sizeof(short) == 2); + assert(sizeof(unsigned short) == 2); + assert(sizeof(int32_t) == 4); + assert(sizeof(uint32_t) == 4); + assert(sizeof(float) == 4); + assert(sizeof(double) == 8); + if (sizeof(char) != 1) return 0; + if (sizeof(unsigned char) != 1) return 0; + if (sizeof(short) != 2) return 0; + if (sizeof(unsigned short) != 2) return 0; + if (sizeof(int32_t) != 4) return 0; + if (sizeof(uint32_t) != 4) return 0; + if (sizeof(float) != 4) return 0; + if (sizeof(double) != 8) return 0; + return 1; +} + +/* ---------------------------------------------------------------------- + * Output handlers + * ---------------------------------------------------------------------- */ +static int oascii_int8(p_ply ply, double value) { + if (value > CHAR_MAX || value < CHAR_MIN) return 0; + return fprintf(ply->fp, "%d ", (char) value) > 0; +} + +static int oascii_uint8(p_ply ply, double value) { + if (value > UCHAR_MAX || value < 0) return 0; + return fprintf(ply->fp, "%d ", (unsigned char) value) > 0; +} + +static int oascii_int16(p_ply ply, double value) { + if (value > SHRT_MAX || value < SHRT_MIN) return 0; + return fprintf(ply->fp, "%d ", (short) value) > 0; +} + +static int oascii_uint16(p_ply ply, double value) { + if (value > USHRT_MAX || value < 0) return 0; + return fprintf(ply->fp, "%d ", (unsigned short) value) > 0; +} + +static int oascii_int32(p_ply ply, double value) { + if (value > INT32_MAX || value < INT32_MIN) return 0; + return fprintf(ply->fp, "%d ", (int) value) > 0; +} + +static int oascii_uint32(p_ply ply, double value) { + if (value > UINT32_MAX || value < 0) return 0; + return fprintf(ply->fp, "%d ", (unsigned int) value) > 0; +} + +static int oascii_float32(p_ply ply, double value) { + if (value < -FLT_MAX || value > FLT_MAX) return 0; + return fprintf(ply->fp, "%g ", (float) value) > 0; +} + +static int oascii_float64(p_ply ply, double value) { + if (value < -DBL_MAX || value > DBL_MAX) return 0; + return fprintf(ply->fp, "%g ", value) > 0; +} + +static int obinary_int8(p_ply ply, double value) { + char int8 = (char) value; + if (value > CHAR_MAX || value < CHAR_MIN) return 0; + return ply->odriver->ochunk(ply, &int8, sizeof(int8)); +} + +static int obinary_uint8(p_ply ply, double value) { + unsigned char uint8 = (unsigned char) value; + if (value > UCHAR_MAX || value < 0) return 0; + return ply->odriver->ochunk(ply, &uint8, sizeof(uint8)); +} + +static int obinary_int16(p_ply ply, double value) { + short int16 = (short) value; + if (value > SHRT_MAX || value < SHRT_MIN) return 0; + return ply->odriver->ochunk(ply, &int16, sizeof(int16)); +} + +static int obinary_uint16(p_ply ply, double value) { + unsigned short uint16 = (unsigned short) value; + if (value > USHRT_MAX || value < 0) return 0; + return ply->odriver->ochunk(ply, &uint16, sizeof(uint16)); +} + +static int obinary_int32(p_ply ply, double value) { + int32_t int32 = (int32_t) value; + if (value > INT32_MAX || value < INT32_MIN) return 0; + return ply->odriver->ochunk(ply, &int32, sizeof(int32)); +} + +static int obinary_uint32(p_ply ply, double value) { + uint32_t uint32 = (uint32_t) 
value; + if (value > UINT32_MAX || value < 0) return 0; + return ply->odriver->ochunk(ply, &uint32, sizeof(uint32)); +} + +static int obinary_float32(p_ply ply, double value) { + float float32 = (float) value; + if (value > FLT_MAX || value < -FLT_MAX) return 0; + return ply->odriver->ochunk(ply, &float32, sizeof(float32)); +} + +static int obinary_float64(p_ply ply, double value) { + return ply->odriver->ochunk(ply, &value, sizeof(value)); +} + +/* ---------------------------------------------------------------------- + * Input handlers + * ---------------------------------------------------------------------- */ +static int iascii_int8(p_ply ply, double *value) { + char *end; + if (!ply_read_word(ply)) return 0; + *value = strtol(BWORD(ply), &end, 10); + if (*end || *value > CHAR_MAX || *value < CHAR_MIN) return 0; + return 1; +} + +static int iascii_uint8(p_ply ply, double *value) { + char *end; + if (!ply_read_word(ply)) return 0; + *value = strtol(BWORD(ply), &end, 10); + if (*end || *value > UCHAR_MAX || *value < 0) return 0; + return 1; +} + +static int iascii_int16(p_ply ply, double *value) { + char *end; + if (!ply_read_word(ply)) return 0; + *value = strtol(BWORD(ply), &end, 10); + if (*end || *value > SHRT_MAX || *value < SHRT_MIN) return 0; + return 1; +} + +static int iascii_uint16(p_ply ply, double *value) { + char *end; + if (!ply_read_word(ply)) return 0; + *value = strtol(BWORD(ply), &end, 10); + if (*end || *value > USHRT_MAX || *value < 0) return 0; + return 1; +} + +static int iascii_int32(p_ply ply, double *value) { + char *end; + if (!ply_read_word(ply)) return 0; + *value = strtol(BWORD(ply), &end, 10); + if (*end || *value > INT32_MAX || *value < INT32_MIN) return 0; + return 1; +} + +static int iascii_uint32(p_ply ply, double *value) { + char *end; + if (!ply_read_word(ply)) return 0; + *value = strtol(BWORD(ply), &end, 10); + if (*end || *value < 0) return 0; + return 1; +} + +static int iascii_float32(p_ply ply, double *value) { + char *end; + if (!ply_read_word(ply)) return 0; + *value = strtod(BWORD(ply), &end); + if (*end || *value < -FLT_MAX || *value > FLT_MAX) return 0; + return 1; +} + +static int iascii_float64(p_ply ply, double *value) { + char *end; + if (!ply_read_word(ply)) return 0; + *value = strtod(BWORD(ply), &end); + if (*end || *value < -DBL_MAX || *value > DBL_MAX) return 0; + return 1; +} + +static int ibinary_int8(p_ply ply, double *value) { + char int8; + if (!ply->idriver->ichunk(ply, &int8, 1)) return 0; + *value = int8; + return 1; +} + +static int ibinary_uint8(p_ply ply, double *value) { + unsigned char uint8; + if (!ply->idriver->ichunk(ply, &uint8, 1)) return 0; + *value = uint8; + return 1; +} + +static int ibinary_int16(p_ply ply, double *value) { + short int16; + if (!ply->idriver->ichunk(ply, &int16, sizeof(int16))) return 0; + *value = int16; + return 1; +} + +static int ibinary_uint16(p_ply ply, double *value) { + unsigned short uint16; + if (!ply->idriver->ichunk(ply, &uint16, sizeof(uint16))) return 0; + *value = uint16; + return 1; +} + +static int ibinary_int32(p_ply ply, double *value) { + int32_t int32; + if (!ply->idriver->ichunk(ply, &int32, sizeof(int32))) return 0; + *value = int32; + return 1; +} + +static int ibinary_uint32(p_ply ply, double *value) { + uint32_t uint32; + if (!ply->idriver->ichunk(ply, &uint32, sizeof(uint32))) return 0; + *value = uint32; + return 1; +} + +static int ibinary_float32(p_ply ply, double *value) { + float float32; + if (!ply->idriver->ichunk(ply, &float32, sizeof(float32))) return 0; + 
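+    /* byte order has already been handled by the chunk driver; the
+       ply_reverse call below only touches the local copy after the value has
+       been stored, so it appears to have no observable effect */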
*value = float32; + ply_reverse(&float32, sizeof(float32)); + return 1; +} + +static int ibinary_float64(p_ply ply, double *value) { + return ply->idriver->ichunk(ply, value, sizeof(double)); +} + +/* ---------------------------------------------------------------------- + * Constants + * ---------------------------------------------------------------------- */ +static t_ply_idriver ply_idriver_ascii = { + { iascii_int8, iascii_uint8, iascii_int16, iascii_uint16, + iascii_int32, iascii_uint32, iascii_float32, iascii_float64, + iascii_int8, iascii_uint8, iascii_int16, iascii_uint16, + iascii_int32, iascii_uint32, iascii_float32, iascii_float64 + }, /* order matches e_ply_type enum */ + NULL, + "ascii input" +}; + +static t_ply_idriver ply_idriver_binary = { + { ibinary_int8, ibinary_uint8, ibinary_int16, ibinary_uint16, + ibinary_int32, ibinary_uint32, ibinary_float32, ibinary_float64, + ibinary_int8, ibinary_uint8, ibinary_int16, ibinary_uint16, + ibinary_int32, ibinary_uint32, ibinary_float32, ibinary_float64 + }, /* order matches e_ply_type enum */ + ply_read_chunk, + "binary input" +}; + +static t_ply_idriver ply_idriver_binary_reverse = { + { ibinary_int8, ibinary_uint8, ibinary_int16, ibinary_uint16, + ibinary_int32, ibinary_uint32, ibinary_float32, ibinary_float64, + ibinary_int8, ibinary_uint8, ibinary_int16, ibinary_uint16, + ibinary_int32, ibinary_uint32, ibinary_float32, ibinary_float64 + }, /* order matches e_ply_type enum */ + ply_read_chunk_reverse, + "reverse binary input" +}; + +static t_ply_odriver ply_odriver_ascii = { + { oascii_int8, oascii_uint8, oascii_int16, oascii_uint16, + oascii_int32, oascii_uint32, oascii_float32, oascii_float64, + oascii_int8, oascii_uint8, oascii_int16, oascii_uint16, + oascii_int32, oascii_uint32, oascii_float32, oascii_float64 + }, /* order matches e_ply_type enum */ + NULL, + "ascii output" +}; + +static t_ply_odriver ply_odriver_binary = { + { obinary_int8, obinary_uint8, obinary_int16, obinary_uint16, + obinary_int32, obinary_uint32, obinary_float32, obinary_float64, + obinary_int8, obinary_uint8, obinary_int16, obinary_uint16, + obinary_int32, obinary_uint32, obinary_float32, obinary_float64 + }, /* order matches e_ply_type enum */ + ply_write_chunk, + "binary output" +}; + +static t_ply_odriver ply_odriver_binary_reverse = { + { obinary_int8, obinary_uint8, obinary_int16, obinary_uint16, + obinary_int32, obinary_uint32, obinary_float32, obinary_float64, + obinary_int8, obinary_uint8, obinary_int16, obinary_uint16, + obinary_int32, obinary_uint32, obinary_float32, obinary_float64 + }, /* order matches e_ply_type enum */ + ply_write_chunk_reverse, + "reverse binary output" +}; + +/* ---------------------------------------------------------------------- + * Copyright (C) 2003 Diego Nehab. All rights reserved. + * + * Permission is hereby granted, free of charge, to any person obtaining + * a copy of this software and associated documentation files (the + * "Software"), to deal in the Software without restriction, including + * without limitation the rights to use, copy, modify, merge, publish, + * distribute, sublicense, and/or sell copies of the Software, and to + * permit persons to whom the Software is furnished to do so, subject to + * the following conditions: + * + * The above copyright notice and this permission notice shall be + * included in all copies or substantial portions of the Software. 
+ * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. + * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY + * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, + * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE + * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + * ---------------------------------------------------------------------- */ diff --git a/mesh-master/mesh/src/rply.h b/mesh-master/mesh/src/rply.h new file mode 100644 index 0000000000000000000000000000000000000000..049fe1868c6d5933ebd1f036c196a5c38dfff6a2 --- /dev/null +++ b/mesh-master/mesh/src/rply.h @@ -0,0 +1,365 @@ +#ifndef PLY_H +#define PLY_H +/* ---------------------------------------------------------------------- + * RPly library, read/write PLY files + * Diego Nehab, Princeton University + * http://www.cs.princeton.edu/~diego/professional/rply + * + * This library is distributed under the MIT License. See notice + * at the end of this file. + * ---------------------------------------------------------------------- */ + +#ifdef __cplusplus +extern "C" { +#endif + +#define RPLY_VERSION "RPly 1.01" +#define RPLY_COPYRIGHT "Copyright (C) 2003-2005 Diego Nehab" +#define RPLY_AUTHORS "Diego Nehab" + +/* ---------------------------------------------------------------------- + * Types + * ---------------------------------------------------------------------- */ +/* structures are opaque */ +typedef struct t_ply_ *p_ply; +typedef struct t_ply_element_ *p_ply_element; +typedef struct t_ply_property_ *p_ply_property; +typedef struct t_ply_argument_ *p_ply_argument; + +/* ply format mode type */ +typedef enum e_ply_storage_mode_ { + PLY_BIG_ENDIAN, + PLY_LITTLE_ENDIAN, + PLY_ASCII, + PLY_DEFAULT /* has to be the last in enum */ +} e_ply_storage_mode; /* order matches ply_storage_mode_list */ + +/* ply data type */ +typedef enum e_ply_type { + PLY_INT8, PLY_UINT8, PLY_INT16, PLY_UINT16, + PLY_INT32, PLY_UIN32, PLY_FLOAT32, PLY_FLOAT64, + PLY_CHAR, PLY_UCHAR, PLY_SHORT, PLY_USHORT, + PLY_INT, PLY_UINT, PLY_FLOAT, PLY_DOUBLE, + PLY_LIST /* has to be the last in enum */ +} e_ply_type; /* order matches ply_type_list */ + +/* ---------------------------------------------------------------------- + * Property reading callback prototype + * + * message: error message + * ---------------------------------------------------------------------- */ +typedef void (*p_ply_error_cb)(const char *message); + +/* ---------------------------------------------------------------------- + * Opens a ply file for reading (fails if file is not a ply file) + * + * error_cb: error callback function + * name: file name + * + * Returns 1 if successful, 0 otherwise + * ---------------------------------------------------------------------- */ +p_ply ply_open(const char *name, p_ply_error_cb error_cb); + +/* ---------------------------------------------------------------------- + * Reads and parses the header of a ply file returned by ply_open + * + * ply: handle returned by ply_open + * + * Returns 1 if successfull, 0 otherwise + * ---------------------------------------------------------------------- */ +int ply_read_header(p_ply ply); + +/* ---------------------------------------------------------------------- + * Property reading callback prototype + * + * argument: parameters for property being processed when callback is called + * 
+ * Returns 1 if should continue processing file, 0 if should abort. + * ---------------------------------------------------------------------- */ +typedef int (*p_ply_read_cb)(p_ply_argument argument); + +/* ---------------------------------------------------------------------- + * Sets up callbacks for property reading after header was parsed + * + * ply: handle returned by ply_open + * element_name: element where property is + * property_name: property to associate element with + * read_cb: function to be called for each property value + * pdata/idata: user data that will be passed to callback + * + * Returns 0 if no element or no property in element, returns the + * number of element instances otherwise. + * ---------------------------------------------------------------------- */ +long ply_set_read_cb(p_ply ply, const char *element_name, + const char *property_name, p_ply_read_cb read_cb, + void *pdata, long idata); + +/* ---------------------------------------------------------------------- + * Returns information about the element originating a callback + * + * argument: handle to argument + * element: receives a the element handle (if non-null) + * instance_index: receives the index of the current element instance + * (if non-null) + * + * Returns 1 if successfull, 0 otherwise + * ---------------------------------------------------------------------- */ +int ply_get_argument_element(p_ply_argument argument, + p_ply_element *element, long *instance_index); + +/* ---------------------------------------------------------------------- + * Returns information about the property originating a callback + * + * argument: handle to argument + * property: receives the property handle (if non-null) + * length: receives the number of values in this property (if non-null) + * value_index: receives the index of current property value (if non-null) + * + * Returns 1 if successfull, 0 otherwise + * ---------------------------------------------------------------------- */ +int ply_get_argument_property(p_ply_argument argument, + p_ply_property *property, long *length, long *value_index); + +/* ---------------------------------------------------------------------- + * Returns user data associated with callback + * + * pdata: receives a copy of user custom data pointer (if non-null) + * idata: receives a copy of user custom data integer (if non-null) + * + * Returns 1 if successfull, 0 otherwise + * ---------------------------------------------------------------------- */ +int ply_get_argument_user_data(p_ply_argument argument, void **pdata, + long *idata); + +/* ---------------------------------------------------------------------- + * Returns the value associated with a callback + * + * argument: handle to argument + * + * Returns the current data item + * ---------------------------------------------------------------------- */ +double ply_get_argument_value(p_ply_argument argument); + +/* ---------------------------------------------------------------------- + * Reads all elements and properties calling the callbacks defined with + * calls to ply_set_read_cb + * + * ply: handle returned by ply_open + * + * Returns 1 if successfull, 0 otherwise + * ---------------------------------------------------------------------- */ +int ply_read(p_ply ply); + +/* ---------------------------------------------------------------------- + * Iterates over all elements by returning the next element. + * Call with NULL to return handle to first element. 
+ * + * ply: handle returned by ply_open + * last: handle of last element returned (NULL for first element) + * + * Returns element if successfull or NULL if no more elements + * ---------------------------------------------------------------------- */ +p_ply_element ply_get_next_element(p_ply ply, p_ply_element last); + +/* ---------------------------------------------------------------------- + * Iterates over all comments by returning the next comment. + * Call with NULL to return pointer to first comment. + * + * ply: handle returned by ply_open + * last: pointer to last comment returned (NULL for first comment) + * + * Returns comment if successfull or NULL if no more comments + * ---------------------------------------------------------------------- */ +const char *ply_get_next_comment(p_ply ply, const char *last); + +/* ---------------------------------------------------------------------- + * Iterates over all obj_infos by returning the next obj_info. + * Call with NULL to return pointer to first obj_info. + * + * ply: handle returned by ply_open + * last: pointer to last obj_info returned (NULL for first obj_info) + * + * Returns obj_info if successfull or NULL if no more obj_infos + * ---------------------------------------------------------------------- */ +const char *ply_get_next_obj_info(p_ply ply, const char *last); + +/* ---------------------------------------------------------------------- + * Returns information about an element + * + * element: element of interest + * name: receives a pointer to internal copy of element name (if non-null) + * ninstances: receives the number of instances of this element (if non-null) + * + * Returns 1 if successfull or 0 otherwise + * ---------------------------------------------------------------------- */ +int ply_get_element_info(p_ply_element element, const char** name, + long *ninstances); + +/* ---------------------------------------------------------------------- + * Iterates over all properties by returning the next property. + * Call with NULL to return handle to first property. 
+ * + * element: handle of element with the properties of interest + * last: handle of last property returned (NULL for first property) + * + * Returns element if successfull or NULL if no more properties + * ---------------------------------------------------------------------- */ +p_ply_property ply_get_next_property(p_ply_element element, + p_ply_property last); + +/* ---------------------------------------------------------------------- + * Returns information about a property + * + * property: handle to property of interest + * name: receives a pointer to internal copy of property name (if non-null) + * type: receives the property type (if non-null) + * length_type: for list properties, receives the scalar type of + * the length field (if non-null) + * value_type: for list properties, receives the scalar type of the value + * fields (if non-null) + * + * Returns 1 if successfull or 0 otherwise + * ---------------------------------------------------------------------- */ +int ply_get_property_info(p_ply_property property, const char** name, + e_ply_type *type, e_ply_type *length_type, e_ply_type *value_type); + +/* ---------------------------------------------------------------------- + * Creates new ply file + * + * name: file name + * storage_mode: file format mode + * + * Returns handle to ply file if successfull, NULL otherwise + * ---------------------------------------------------------------------- */ +p_ply ply_create(const char *name, e_ply_storage_mode storage_mode, + p_ply_error_cb error_cb); + +/* ---------------------------------------------------------------------- + * Adds a new element to the ply file created by ply_create + * + * ply: handle returned by ply_create + * name: name of new element + * ninstances: number of element of this time in file + * + * Returns 1 if successfull, 0 otherwise + * ---------------------------------------------------------------------- */ +int ply_add_element(p_ply ply, const char *name, long ninstances); + +/* ---------------------------------------------------------------------- + * Adds a new property to the last element added by ply_add_element + * + * ply: handle returned by ply_create + * name: name of new property + * type: property type + * length_type: scalar type of length field of a list property + * value_type: scalar type of value fields of a list property + * + * Returns 1 if successfull, 0 otherwise + * ---------------------------------------------------------------------- */ +int ply_add_property(p_ply ply, const char *name, e_ply_type type, + e_ply_type length_type, e_ply_type value_type); + +/* ---------------------------------------------------------------------- + * Adds a new list property to the last element added by ply_add_element + * + * ply: handle returned by ply_create + * name: name of new property + * length_type: scalar type of length field of a list property + * value_type: scalar type of value fields of a list property + * + * Returns 1 if successfull, 0 otherwise + * ---------------------------------------------------------------------- */ +int ply_add_list_property(p_ply ply, const char *name, + e_ply_type length_type, e_ply_type value_type); + +/* ---------------------------------------------------------------------- + * Adds a new property to the last element added by ply_add_element + * + * ply: handle returned by ply_create + * name: name of new property + * type: property type + * + * Returns 1 if successfull, 0 otherwise + * ---------------------------------------------------------------------- */ 
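+/* ----------------------------------------------------------------------
+ * Illustrative usage sketch of the read and write functions declared in
+ * this header (not part of the original documentation). File names, the
+ * "vertex"/"x" identifiers and the callback are placeholders; error
+ * handling and the <stdio.h> include are omitted. The callback returns 1
+ * to continue reading (0 would abort ply_read):
+ *
+ *   static int value_cb(p_ply_argument argument) {
+ *       printf("%g\n", ply_get_argument_value(argument));
+ *       return 1;
+ *   }
+ *
+ *   p_ply iply = ply_open("in.ply", NULL);
+ *   ply_read_header(iply);
+ *   ply_set_read_cb(iply, "vertex", "x", value_cb, NULL, 0);
+ *   ply_read(iply);
+ *   ply_close(iply);
+ *
+ *   p_ply oply = ply_create("out.ply", PLY_ASCII, NULL);
+ *   ply_add_element(oply, "vertex", 1);
+ *   ply_add_scalar_property(oply, "x", PLY_FLOAT32);
+ *   ply_write_header(oply);
+ *   ply_write(oply, 1.0);
+ *   ply_close(oply);
+ * ---------------------------------------------------------------------- */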
+int ply_add_scalar_property(p_ply ply, const char *name, e_ply_type type); + +/* ---------------------------------------------------------------------- + * Adds a new comment item + * + * ply: handle returned by ply_create + * comment: pointer to string with comment text + * + * Returns 1 if successfull, 0 otherwise + * ---------------------------------------------------------------------- */ +int ply_add_comment(p_ply ply, const char *comment); + +/* ---------------------------------------------------------------------- + * Adds a new obj_info item + * + * ply: handle returned by ply_create + * comment: pointer to string with obj_info data + * + * Returns 1 if successfull, 0 otherwise + * ---------------------------------------------------------------------- */ +int ply_add_obj_info(p_ply ply, const char *obj_info); + +/* ---------------------------------------------------------------------- + * Writes the ply file header after all element and properties have been + * defined by calls to ply_add_element and ply_add_property + * + * ply: handle returned by ply_create + * + * Returns 1 if successfull, 0 otherwise + * ---------------------------------------------------------------------- */ +int ply_write_header(p_ply ply); + +/* ---------------------------------------------------------------------- + * Writes one property value, in the order they should be written to the + * file. For each element type, write all elements of that type in order. + * For each element, write all its properties in order. For scalar + * properties, just write the value. For list properties, write the length + * and then each of the values. + * + * ply: handle returned by ply_create + * + * Returns 1 if successfull, 0 otherwise + * ---------------------------------------------------------------------- */ +int ply_write(p_ply ply, double value); + +/* ---------------------------------------------------------------------- + * Closes a ply file handle. Releases all memory used by handle + * + * ply: handle to be closed. + * + * Returns 1 if successfull, 0 otherwise + * ---------------------------------------------------------------------- */ +int ply_close(p_ply ply); + +#ifdef __cplusplus +} +#endif + +#endif /* RPLY_H */ + +/* ---------------------------------------------------------------------- + * Copyright (C) 2003-2005 Diego Nehab. All rights reserved. + * + * Permission is hereby granted, free of charge, to any person obtaining + * a copy of this software and associated documentation files (the + * "Software"), to deal in the Software without restriction, including + * without limitation the rights to use, copy, modify, merge, publish, + * distribute, sublicense, and/or sell copies of the Software, and to + * permit persons to whom the Software is furnished to do so, subject to + * the following conditions: + * + * The above copyright notice and this permission notice shall be + * included in all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. + * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY + * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, + * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE + * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ * ---------------------------------------------------------------------- */ diff --git a/mesh-master/mesh/src/spatialsearchmodule.cpp b/mesh-master/mesh/src/spatialsearchmodule.cpp new file mode 100644 index 0000000000000000000000000000000000000000..d452204bd087d589b35fc2c6d80206c5da5cc331 --- /dev/null +++ b/mesh-master/mesh/src/spatialsearchmodule.cpp @@ -0,0 +1,407 @@ + +// needed to avoid the link to debug "_d.lib" libraries +#include "hijack_python_headers.hpp" +#include + +#ifdef _OPENMP +#include +#endif + +#ifdef HAVE_TBB +#include "tbb/tbb.h" +using namespace tbb; +#endif + +#include "cgal_error_emulation.hpp" +#include "nearest_triangle.hpp" +#include "nearest_point_triangle_3.h" + +typedef uint32_t Index; + +static PyObject* spatialsearch_aabbtree_compute(PyObject *self, PyObject *args); + +static PyObject* spatialsearch_aabbtree_nearest(PyObject *self, PyObject *args); + +static PyObject* spatialsearch_aabbtree_nearest_alongnormal(PyObject *self, PyObject *args); + +static PyObject * +spatialsearch_aabbtree_intersections_indices(PyObject *self, PyObject *args, PyObject *keywds); + +static PyObject *Mesh_IntersectionsError; + +static PyMethodDef SpatialsearchMethods[] = { + {"aabbtree_compute", spatialsearch_aabbtree_compute, METH_VARARGS, "aabbtree_compute."}, + {"aabbtree_nearest", spatialsearch_aabbtree_nearest, METH_VARARGS, "aabbtree_nearest."}, + {"aabbtree_nearest_alongnormal", spatialsearch_aabbtree_nearest_alongnormal, METH_VARARGS, "aabbtree_nearest."}, + {NULL, NULL, 0} /* Sentinel */ +}; + +static struct PyModuleDef moduleDef = { + PyModuleDef_HEAD_INIT, + "spatialsearch", /* name of module */ + "", /* module documentation, may be NULL */ + -1, /* size of per-interpreter state of the module, or -1 if the module keeps state in global variables. 
*/ + SpatialsearchMethods +}; + +PyMODINIT_FUNC PyInit_spatialsearch(void) { + + PyObject *m = PyModule_Create(&moduleDef); + if (m == NULL) { + return NULL; + } + + import_array(); + + // Add Exceptions Object + Mesh_IntersectionsError = PyErr_NewException(const_cast("spatialsearch.Mesh_IntersectionsError"), NULL, NULL); + Py_INCREF(Mesh_IntersectionsError); + PyModule_AddObject(m, "Mesh_IntersectionsError", Mesh_IntersectionsError); + + return m; +} + + +void aabb_tree_destructor(PyObject *ptr) +{ + TreeAndTri* search = (TreeAndTri*) PyCapsule_GetPointer(ptr, NULL); + delete search; +} + +static PyObject * +spatialsearch_aabbtree_compute(PyObject *self, PyObject *args) +{ + PyArrayObject *py_v = NULL, *py_f = NULL; // numpy memory copy, probably managed by python + + if (!PyArg_ParseTuple(args, "O!O!", &PyArray_Type, &py_v,&PyArray_Type, &py_f)) + return NULL; + + if (py_v->descr->type_num != NPY_DOUBLE || py_v->nd != 2) { + PyErr_SetString(PyExc_ValueError, "Vertices must be of type double, and 2 dimensional"); + return NULL; + } + if (py_f->descr->type_num != NPY_UINT32 || py_f->nd != 2) { + PyErr_SetString(PyExc_ValueError, "Faces must be of type uint32, and 2 dimensional"); + return NULL; + } + + npy_intp* v_dims = PyArray_DIMS(py_v); + npy_intp* f_dims = PyArray_DIMS(py_f); + + if (v_dims[1] != 3 || f_dims[1] != 3) { + PyErr_SetString(PyExc_ValueError, "Input must be Nx3"); + return NULL; + } + + double *pV = (double*)PyArray_DATA(py_v); + uint32_t *pF = (uint32_t*)PyArray_DATA(py_f); + + size_t P = v_dims[0]; + size_t T = f_dims[0]; + + array* m_mesh_tri=reinterpret_cast*>(pF); + array* m_mesh_points=reinterpret_cast*>(pV); + + TreeAndTri* search = new TreeAndTri; + search->points.reserve(P); + + for(size_t pp=0; pppoints.push_back(K::Point_3(m_mesh_points[pp][0], m_mesh_points[pp][1], m_mesh_points[pp][2])); + } + + search->triangles.reserve(T); + + for(size_t tt=0; tttriangles.push_back(K::Triangle_3(search->points[m_mesh_tri[tt][0]], + search->points[m_mesh_tri[tt][1]], + search->points[m_mesh_tri[tt][2]])); + } + search->tree.rebuild(search->triangles.begin(), search->triangles.end()); + search->tree.accelerate_distance_queries(); + + PyObject* result = PyCapsule_New((void*)search, NULL, aabb_tree_destructor); + return result; +} + +void spatialsearch_aabbtree_nearest_one(int ss, TreeAndTri * search, std::vector &sample_points, + uint32_t* closest_triangles, uint32_t* closest_part, array* closest_point) +{ + Tree::Point_and_primitive_id closest=search->tree.closest_point_and_primitive(sample_points[ss]); + closest_triangles[ss]=std::distance(search->triangles.begin(), closest.second); + for(size_t cc=0; cc<3; ++cc) { + closest_point[ss][cc]=closest.first[cc]; + } + K k; + K::Point_3 result; + closest_part[ss]=CGAL::iev::nearest_primitive(sample_points[ss], *closest.second, result, k); +} + +#ifdef HAVE_TBB + +class AaBbTreeNearestTbb { + TreeAndTri *search; + std::vector *sample_points; + uint32_t* closest_triangles; + uint32_t* closest_part; + array* closest_point; +public: + void operator()(const blocked_range& r) const { + for (size_t i=r.begin(); i!=r.end(); ++i) + spatialsearch_aabbtree_nearest_one(i, search, *sample_points, closest_triangles, closest_part, closest_point); + } + AaBbTreeNearestTbb( TreeAndTri * search, std::vector *sample_points, + uint32_t* closest_triangles, uint32_t* closest_part, array* closest_point) : + search(search), sample_points(sample_points), + closest_triangles(closest_triangles), + closest_part(closest_part), + closest_point(closest_point) {} 
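+    // TBB functor: operator() processes one blocked_range of query indices,
+    // forwarding each index to spatialsearch_aabbtree_nearest_one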
+}; + +#endif + +static PyObject* spatialsearch_aabbtree_nearest(PyObject *self, PyObject *args) +{ + PyObject *py_tree, *py_v; + if (!PyArg_ParseTuple(args, "OO!", &py_tree, &PyArray_Type, &py_v)) + return NULL; + TreeAndTri *search = (TreeAndTri *) PyCapsule_GetPointer(py_tree, NULL); + + npy_intp* v_dims = PyArray_DIMS(py_v); + + if (v_dims[1] != 3) { + PyErr_SetString(PyExc_ValueError, "Input must be Nx3"); + return NULL; + } + + size_t S=v_dims[0]; + + array* m_sample_points=reinterpret_cast*>(PyArray_DATA(py_v)); + + #ifdef _OPENMP + omp_set_num_threads(8); + #endif + + std::vector sample_points; + sample_points.reserve(S); + for(size_t ss=0; ss(PyArray_DATA(result1)); + uint32_t* closest_part=reinterpret_cast(PyArray_DATA(result2)); + + npy_intp result3_dims[] = {S, 3}; + PyObject *result3 = PyArray_SimpleNew(2, result3_dims, NPY_DOUBLE); + array* closest_point = reinterpret_cast*>(PyArray_DATA(result3)); + + +#ifdef HAVE_TBB + parallel_for(blocked_range(0,S), AaBbTreieNearestTbb(search, &sample_points, closest_triangles, closest_part, closest_point)); +#else +#ifdef HAVE_OPENMP + #pragma omp parallel for +#endif + for(size_t ss = 0; ss < S; ++ss) { + spatialsearch_aabbtree_nearest_one(ss, search, sample_points, closest_triangles, closest_part, closest_point); + } +#endif + return Py_BuildValue("NNN", result1, result2, result3); +} + +static PyObject* spatialsearch_aabbtree_nearest_alongnormal(PyObject *self, PyObject *args) +{ + PyObject *py_tree, *py_p, *py_n; + if (!PyArg_ParseTuple(args, "OO!O!", &py_tree, &PyArray_Type, &py_p, &PyArray_Type, &py_n)) + return NULL; + TreeAndTri *search = (TreeAndTri *) PyCapsule_GetPointer(py_tree, NULL); + + npy_intp* p_dims = PyArray_DIMS(py_p); + npy_intp* n_dims = PyArray_DIMS(py_p); + + if (p_dims[1] != 3 || n_dims[1] != 3 || p_dims[0] != n_dims[0]) { + PyErr_SetString(PyExc_ValueError, "Points and normals must be Nx3"); + return NULL; + } + + size_t S=p_dims[0]; + + array* p_arr = reinterpret_cast*>(PyArray_DATA(py_p)); + array* n_arr = reinterpret_cast*>(PyArray_DATA(py_n)); + + #ifdef _OPENMP + omp_set_num_threads(8); + #endif + + std::vector p_v; + std::vector n_v; + p_v.reserve(S); + n_v.reserve(S); + for(size_t ss = 0; ss < S; ++ss) { + p_v.push_back(K::Point_3(p_arr[ss][0], p_arr[ss][1], p_arr[ss][2])); + n_v.push_back(K::Vector_3(n_arr[ss][0], n_arr[ss][1], n_arr[ss][2])); + } + + npy_intp result1_dims[] = {S}; + + PyObject *result1 = PyArray_SimpleNew(1, result1_dims, NPY_DOUBLE); + + double* distance = reinterpret_cast(PyArray_DATA(result1)); + + PyObject *result2 = PyArray_SimpleNew(1, result1_dims, NPY_UINT32); + uint32_t* closest_triangles = reinterpret_cast(PyArray_DATA(result2)); + + npy_intp result3_dims[] = {S, 3}; + PyObject *result3 = PyArray_SimpleNew(2, result3_dims, NPY_DOUBLE); + array* closest_point = reinterpret_cast*>(PyArray_DATA(result3)); + +#ifdef HAVE_OPENMP + #pragma omp parallel for +#endif + for(size_t ss=0; ss intersections; + search->tree.all_intersections(K::Ray_3(p_v[ss],n_v[ss]), std::back_inserter(intersections)); + search->tree.all_intersections(K::Ray_3(p_v[ss],-n_v[ss]), std::back_inserter(intersections)); + + std::list::iterator it_int; + std::vector ss_dists; + std::vector ss_points; + std::vector ss_tris; + for(it_int=intersections.begin(); it_int!=intersections.end(); ++it_int){ + CGAL::Object object = it_int->first; + + ss_tris.push_back(std::distance(search->triangles.begin(), it_int->second)); + + K::Point_3 point; + K::Segment_3 segment; + if(CGAL::assign(point,object)){ + 
ss_dists.push_back(sqrt((point - p_v[ss]).squared_length())); + ss_points.push_back(point); + } + if(CGAL::assign(segment,object)){ + K::Point_3 ray_s = p_v[ss]; + K::Vector_3 ray_dir = n_v[ss]; + K::Point_3 seg_s = segment.source(); + K::Vector_3 seg_dir = segment.to_vector(); + K::Vector_3 source_diff = ray_s - seg_s; + double ts = ((source_diff.y())*ray_dir.x() - (source_diff.x())*ray_dir.y())/(seg_dir.y()*ray_dir.x() - seg_dir.x()*ray_dir.y()); + point = seg_s + ts*seg_dir; + if(!segment.has_on(point)) + std::cerr << "ERROR:Debug segment intersection" << std::endl; + ss_points.push_back(point); + ss_dists.push_back(sqrt((point - p_v[ss]).squared_length())); + } + } + if(ss_dists.empty()){ + distance[ss] = 1e100; + } + else{ + //distance[ss] = *std::min_element(ss_dists.begin(), ss_dists.end()); + size_t idx = std::distance(ss_dists.begin(), std::min_element(ss_dists.begin(), ss_dists.end())); + distance[ss] = ss_dists[idx]; + for(size_t cc=0; cc<3; ++cc) { + closest_point[ss][cc]=ss_points[idx][cc]; + } + closest_triangles[ss] = ss_tris[idx]; + } + } + return Py_BuildValue("NNN", result1, result2, result3); +} + + +static PyObject * spatialsearch_aabbtree_intersections_indices(PyObject *self, PyObject *args, PyObject *keywds) { + try { + PyObject *py_tree=NULL; + PyArrayObject *py_qv=NULL, *py_qf=NULL; + + // a copy of the literal string is done into a (non const) char + char key1[] = "tree"; + char key2[] = "qv"; + char key3[] = "qf"; + static char* kwlist[] = {key1, key2, key3, NULL}; + + if (!PyArg_ParseTupleAndKeywords(args, keywds, "OO!O!", kwlist, + &py_tree, + &PyArray_Type, &py_qv, + &PyArray_Type, &py_qf)) { + return NULL; + } + + TreeAndTri *search = (TreeAndTri *)PyCapsule_GetPointer(py_tree, NULL); + + if (py_qv->descr->type_num != NPY_DOUBLE || py_qv->nd != 2) { + PyErr_SetString(PyExc_ValueError, "Query Vertices must be of type double, and 2 dimensional"); + return NULL; + } + + if (py_qf->descr->type_num != NPY_UINT32 || py_qf->nd != 2) { + PyErr_SetString(PyExc_ValueError, "Query Faces must be of type uint32, and 2 dimensional"); + return NULL; + } + + // QUERY MESH STRUCTURE + npy_intp* qv_dims = PyArray_DIMS(py_qv); + npy_intp* qf_dims = PyArray_DIMS(py_qf); + + if (qv_dims[1] != 3 || qf_dims[1] != 3) { + PyErr_SetString(PyExc_ValueError, "Input must be Nx3"); + return NULL; + } + + double *pQV = (double*)PyArray_DATA(py_qv); + uint32_t *pQF = (uint32_t*)PyArray_DATA(py_qf); + + size_t q_n_verts = qv_dims[0]; + size_t q_n_faces = qf_dims[0]; + + // BUILD STRUCTURE FOR QUERY MESH + const array* q_faces_arr=reinterpret_cast*>(pQF); + const array* q_verts_arr=reinterpret_cast*>(pQV); + + std::vector q_verts_v; + q_verts_v.reserve(q_n_verts); + for(size_t pp=0; pp q_faces_v; + q_faces_v.reserve(q_n_faces); + for(size_t tt=0; tt mesh_intersections; + #pragma omp parallel for + for(size_t tt=0; tttree.do_intersect(triangle_query)) { + mesh_intersections.push_back(tt); + } + } + + // GET RESULT BACK + npy_intp result_dims[] = {mesh_intersections.size()}; + PyObject *result = PyArray_SimpleNew(1, result_dims, NPY_UINT32); + + uint32_t* mesh_intersections_arr = reinterpret_cast(PyArray_DATA(result)); + std::copy(mesh_intersections.begin(), mesh_intersections.end(),mesh_intersections_arr); + + return Py_BuildValue("N",result); + } catch (Mesh_IntersectionsException& e) { + PyErr_SetString(Mesh_IntersectionsError, e.what()); + return NULL; + } +} diff --git a/mesh-master/mesh/src/visibility.cpp b/mesh-master/mesh/src/visibility.cpp new file mode 100644 index 
0000000000000000000000000000000000000000..052a0b4ec4740fbd7e9efea4e91a9a678fcabead --- /dev/null +++ b/mesh-master/mesh/src/visibility.cpp @@ -0,0 +1,174 @@ +#define CGAL_CFG_NO_CPP0X_VARIADIC_TEMPLATES 1 +#include +#include +#include +#include +#include + +#include +#include + +#include +#include + +#if HAVE_TBB +#include "tbb/parallel_for.h" +#include "tbb/parallel_for_each.h" +#include "tbb/blocked_range.h" +#include "tbb/task_scheduler_init.h" +#include +#endif + +#ifdef HAVE_OPENMP +#include +#endif + +#include "nearest_triangle.hpp" + +using boost::uint32_t; +using boost::array; +using std::vector; + +typedef CGAL::Simple_cartesian K; +typedef K::Point_3 Point; +typedef K::Segment_3 Segment; +typedef K::Vector_3 Vector; +typedef K::Triangle_3 Triangle; +typedef K::Ray_3 Ray; + +typedef std::vector::iterator Iterator; +typedef CGAL::AABB_triangle_primitive Primitive; + +typedef CGAL::AABB_traits AABB_triangle_traits; +typedef AABB_triangle_traits::Point_and_primitive_id Point_and_Primitive_id; +typedef CGAL::AABB_tree Tree; +typedef Tree::Object_and_primitive_id Object_and_Primitive_id; + +#include "visibility.h" + +struct VisibilityTask{ + const array* sensors_arr; + const array* cams_arr; + const std::vector& verts_v; + const std::vector& normals_v; + const bool use_sensors; + const Tree& tree; + const double min_dist; + uint32_t* visibility_mat; + double* normal_dot_cam_mat; + + VisibilityTask(const array* sensors_arr, + const array* cams_arr, + const std::vector& verts_v, + const std::vector& normals_v, + const bool use_sensors, + const Tree& tree, + const double& min_dist, + uint32_t* visibility_mat, + double* normal_dot_cam_mat): + sensors_arr(sensors_arr), cams_arr(cams_arr), verts_v(verts_v), + normals_v(normals_v), use_sensors(use_sensors), + tree(tree), + min_dist(min_dist), visibility_mat(visibility_mat), + normal_dot_cam_mat(normal_dot_cam_mat){;} + + void operator() (const int icam) const{ + Point cam(cams_arr[icam][0], cams_arr[icam][1], cams_arr[icam][2]); + K::Vector_3 xoff,yoff,zoff; + double planeoff; + if (use_sensors){ + xoff = K::Vector_3(sensors_arr[icam][0],sensors_arr[icam][1],sensors_arr[icam][2]); + yoff = K::Vector_3(sensors_arr[icam][3],sensors_arr[icam][4],sensors_arr[icam][5]); + zoff = K::Vector_3(-sensors_arr[icam][6],-sensors_arr[icam][7],-sensors_arr[icam][8]); + // compute D; dot product between plane normal zoff and a point (cam+zoff) on the plane + planeoff = zoff*((cam+zoff)-CGAL::ORIGIN); + } + for(unsigned ivert=0; ivert x*y < ||y||^2 + uint32_t reach_sensor = ((fabs(p_i*xoff) < xoff.squared_length()) && + (fabs(p_i*yoff) < yoff.squared_length())); + visibility_mat[ivert + icam*verts_v.size()] = reach_sensor; + } + else + visibility_mat[ivert + icam*verts_v.size()] = 0; + } + else + visibility_mat[ivert + icam*verts_v.size()] = reach_lens; + } + } + +#if HAVE_TBB + void operator()( const tbb::blocked_range& range) const{ + for(int icam=range.begin(); icam!=range.end(); ++icam) + this->operator()(icam); + } +#elif HAVE_OMP + #pragma omp parallel for + void operator()( const std::vector& range) const{ + for(std::vector::const_iterator itcam=range.begin(); itcam!=range.end(); ++itcam) + this->operator()(*itcam); + } +#else + void operator()( const std::vector& range) const{ + for(std::vector::const_iterator itcam=range.begin(); itcam!=range.end(); ++itcam) + this->operator()(*itcam); + } +#endif +}; + +void _internal_compute(const TreeAndTri* search, const double* normals, + const double* cams, const size_t n_cams, + const bool use_sensors, const 
double* sensors, + const double& min_dist, uint32_t *visibility_mat, + double *normal_dot_cam_mat){ + + + std::vector normals_v; + if(normals != NULL){ + const array* normals_arr=reinterpret_cast*>(normals); + normals_v.reserve(search->points.size()); + for(size_t pp=0; pppoints.size(); ++pp){ + normals_v.push_back(K::Vector_3(normals_arr[pp][0], + normals_arr[pp][1], + normals_arr[pp][2])); + } + } + + const array* cams_arr=reinterpret_cast*>(cams); + const array* sensors_arr=reinterpret_cast*>(sensors); + + VisibilityTask vtask(sensors_arr, cams_arr, search->points, normals_v, + use_sensors, search->tree, + min_dist, visibility_mat, normal_dot_cam_mat); +#if HAVE_TBB + tbb::task_scheduler_init init; + tbb::parallel_for( tbb::blocked_range(0,n_cams), vtask); +#else + std::vector range(n_cams); + for(unsigned i=0;i(0), + // boost::counting_iterator(n_cams), + // vtask); + //vtask(tbb::blocked_range(0,n_cams)); +} + diff --git a/mesh-master/mesh/src/visibility.h b/mesh-master/mesh/src/visibility.h new file mode 100644 index 0000000000000000000000000000000000000000..4098d385fe844f41d5299b813ee7714c9e707ce5 --- /dev/null +++ b/mesh-master/mesh/src/visibility.h @@ -0,0 +1,24 @@ +#ifndef VISIBILITY_H +#define VISIBILITY_H +#include +#define CGAL_CFG_NO_CPP0X_VARIADIC_TEMPLATES 1 +#include +#include +#include + +typedef CGAL::Simple_cartesian::Point_3 Point; +void _internal_compute(const TreeAndTri* search, const double* normals, + const double* cams, const size_t n_cams, + const bool use_sensors, const double* sensors, + const double& min_dist, uint32_t *visibility_mat, + double *normal_dot_cam_mat); + +class VisibilityException: public std::exception { +public: + VisibilityException(std::string m="VisibilityException!"):msg(m) {} + ~VisibilityException() throw() {} + const char* what() const throw() { return msg.c_str(); } +private: + std::string msg; +}; +#endif // VISIBILITY_H diff --git a/mesh-master/mesh/texture.py b/mesh-master/mesh/texture.py new file mode 100644 index 0000000000000000000000000000000000000000..5f6032b452c064b4c735cc1c9d2d7e4c01ce5351 --- /dev/null +++ b/mesh-master/mesh/texture.py @@ -0,0 +1,107 @@ +#!/usr/bin/env python +# encoding: utf-8 + +# Copyright (c) 2013 Max Planck Society. All rights reserved. +# Created by Matthew Loper on 2013-02-20. + + +import numpy as np + +""" +texture.py + +""" + +__all__ = ['texture_coordinates_by_vertex', ] + + +def texture_coordinates_by_vertex(self): + texture_coordinates_by_vertex = [[] for i in range(len(self.v))] + for i, face in enumerate(self.f): + for j in [0, 1, 2]: + texture_coordinates_by_vertex[face[j]].append(self.vt[self.ft[i][j]]) + return texture_coordinates_by_vertex + + +def reload_texture_image(self): + import cv2 + # image is loaded as image_height-by-image_width-by-3 array in BGR color order. 
+ self._texture_image = cv2.imread(self.texture_filepath) if self.texture_filepath else None + texture_sizes = [32, 64, 128, 256, 512, 1024, 2048, 4096, 8192, 16384] + if self._texture_image is not None and (self._texture_image.shape[0] != self._texture_image.shape[1] or + self._texture_image.shape[0] not in texture_sizes or + self._texture_image.shape[0] not in texture_sizes): + closest_texture_size_idx = (np.abs(np.array(texture_sizes) - max(self._texture_image.shape))).argmin() + sz = texture_sizes[closest_texture_size_idx] + self._texture_image = cv2.resize(self._texture_image, (sz, sz)) + + +def load_texture(self, texture_version): + ''' + Expect a texture version number as an integer, load the texture version from 'texture_path' (global variable to the + package). + Currently there are versions [0,1,2,3] available. + ''' + import os + from . import texture_path + + lowres_tex_template = os.path.join(texture_path, 'textured_template_low_v%d.obj' % texture_version) + highres_tex_template = os.path.join(texture_path, 'textured_template_high_v%d.obj' % texture_version) + from .mesh import Mesh + + mesh_with_texture = Mesh(filename=lowres_tex_template) + if not np.all(mesh_with_texture.f.shape == self.f.shape): + mesh_with_texture = Mesh(filename=highres_tex_template) + self.transfer_texture(mesh_with_texture) + + +def transfer_texture(self, mesh_with_texture): + if not np.all(mesh_with_texture.f.shape == self.f.shape): + raise Exception('Mesh topology mismatch') + + self.vt = mesh_with_texture.vt.copy() + self.ft = mesh_with_texture.ft.copy() + + if not np.all(mesh_with_texture.f == self.f): + if np.all(mesh_with_texture.f == np.fliplr(self.f)): + self.ft = np.fliplr(self.ft) + else: + # Same shape; let's see if it's face ordering; this could be a bit faster... + face_mapping = {} + for f, ii in zip(self.f, range(len(self.f))): + face_mapping[" ".join([str(x) for x in sorted(f)])] = ii + self.ft = np.zeros(self.f.shape, dtype=np.uint32) + + for f, ft in zip(mesh_with_texture.f, mesh_with_texture.ft): + k = " ".join([str(x) for x in sorted(f)]) + if k not in face_mapping: + raise Exception('Mesh topology mismatch') + # the vertex order can be arbitrary... + ids = [] + for f_id in f: + ids.append(np.where(self.f[face_mapping[k]] == f_id)[0][0]) + ids = np.array(ids) + self.ft[face_mapping[k]] = np.array(ft[ids]) + + self.texture_filepath = mesh_with_texture.texture_filepath + self._texture_image = None + + +def set_texture_image(self, path_to_texture): + self.texture_filepath = path_to_texture + + +def texture_rgb(self, texture_coordinate): + h, w = np.array(self.texture_image.shape[:2]) - 1 + return np.double(self.texture_image[int(h * (1.0 - texture_coordinate[1]))][int(w * (texture_coordinate[0]))])[::-1] + + +def texture_rgb_vec(self, texture_coordinates): + h, w = np.array(self.texture_image.shape[:2]) - 1 + n_ch = self.texture_image.shape[2] + # XXX texture_coordinates can be lower than 0! clip needed! 
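texture_rgb above, and the vectorised texture_rgb_vec being defined here, turn a (u, v) texture coordinate into a pixel lookup: v is flipped because image rows grow downwards, both coordinates are scaled by the image extent minus one, and the BGR channel order produced by cv2.imread is reversed into RGB. A standalone sketch of the same arithmetic on a toy image, kept outside the class so the indexing is easy to verify:

import numpy as np

def uv_to_rgb(image, uv):
    # image: H x W x 3 array in BGR order (as cv2.imread returns)
    # uv: texture coordinate in [0, 1]^2 with the origin at the bottom-left
    h, w = np.array(image.shape[:2]) - 1
    row = int(h * (1.0 - uv[1]))      # flip v: v = 0 is the bottom row of the image
    col = int(w * uv[0])
    return image[row, col][::-1]      # BGR -> RGB

image = np.zeros((4, 4, 3), dtype=np.uint8)
image[0, 3] = (255, 0, 0)             # a blue pixel (in BGR) at the top-right corner
print(uv_to_rgb(image, (1.0, 1.0)))   # [  0   0 255]: pure blue once read as RGB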
+ d1 = (h * (1.0 - np.clip(texture_coordinates[:, 1], 0, 1))).astype(np.int) + d0 = (w * (np.clip(texture_coordinates[:, 0], 0, 1))).astype(np.int) + flat_texture = self.texture_image.flatten() + indices = np.hstack([((d1 * (w + 1) * n_ch) + (d0 * n_ch) + (2 - i)).reshape(-1, 1) for i in range(n_ch)]) + return flat_texture[indices] diff --git a/mesh-master/mesh/thirdparty/CGAL-4.7.tar.gz b/mesh-master/mesh/thirdparty/CGAL-4.7.tar.gz new file mode 100644 index 0000000000000000000000000000000000000000..2a6fae6710fea9321761a7e7dc960a7ca5981f70 --- /dev/null +++ b/mesh-master/mesh/thirdparty/CGAL-4.7.tar.gz @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1be058fe9fc4d8331b48daf8beb114a049fd4970220d8a570ff709b7789dacae +size 20046799 diff --git a/mesh-master/mesh/topology/__init__.py b/mesh-master/mesh/topology/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..52544ca0073c7ac8e827b343e4911fd197faf38b --- /dev/null +++ b/mesh-master/mesh/topology/__init__.py @@ -0,0 +1,4 @@ +#!/usr/bin/env python +# encoding: utf-8 + +# Copyright (c) 2016 Max Planck Society. All rights reserved. diff --git a/mesh-master/mesh/topology/connectivity.py b/mesh-master/mesh/topology/connectivity.py new file mode 100644 index 0000000000000000000000000000000000000000..24626082ee56e2835ad0536883b6f2c94bcafce1 --- /dev/null +++ b/mesh-master/mesh/topology/connectivity.py @@ -0,0 +1,203 @@ +#!/usr/bin/env python +# encoding: utf-8 + +# Copyright (c) 2012 Max Planck Society. All rights reserved. +# Created by Matthew Loper on 2012-10-24. + +import os +import zlib +import numpy as np +import pickle +import scipy.sparse as sp + +from ..utils import row, col +from .. import mesh_package_cache_folder + + +def get_vert_opposites_per_edge(mesh): + """Returns a dictionary from vertidx-pairs to opposites. + For example, a key consist of [4,5)] meaning the edge between + vertices 4 and 5, and a value might be [10,11] which are the indices + of the vertices opposing this edge.""" + result = {} + for f in mesh.f: + for i in range(3): + key = [f[i], f[(i + 1) % 3]] + key.sort() + key = tuple(key) + val = f[(i + 2) % 3] + + if key in result: + result[key].append(val) + else: + result[key] = [val] + return result + + +def get_vert_connectivity(mesh): + """Returns a sparse matrix (of size #verts x #verts) where each nonzero + element indicates a neighborhood relation. For example, if there is a + nonzero element in position (15,12), that means vertex 15 is connected + by an edge to vertex 12.""" + + vpv = sp.csc_matrix((len(mesh.v), len(mesh.v))) + + # for each column in the faces... + for i in range(3): + IS = mesh.f[:, i] + JS = mesh.f[:, (i + 1) % 3] + data = np.ones(len(IS)) + ij = np.vstack((row(IS.flatten()), row(JS.flatten()))) + mtx = sp.csc_matrix((data, ij), shape=vpv.shape) + vpv = vpv + mtx + mtx.T + + return vpv + + +def vertices_to_edges_matrix(mesh, want_xyz=True): + """Returns a matrix M, which if multiplied by vertices, + gives back edges (so "e = M.dot(v)"). Note that this generates + one edge per edge, *not* two edges per triangle. 
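get_vert_connectivity above accumulates, for each of the three edge directions of every face, a sparse matrix with ones at (i, j) plus its transpose, so the result is a symmetric #verts x #verts matrix whose nonzeros mark vertices joined by an edge. A small self-contained sketch of the same construction for two triangles sharing an edge:

import numpy as np
import scipy.sparse as sp

f = np.array([[0, 1, 2],
              [2, 1, 3]])             # two triangles sharing the edge (1, 2)
n = f.max() + 1

adj = sp.csc_matrix((n, n))
for i in range(3):
    I, J = f[:, i], f[:, (i + 1) % 3]
    m = sp.csc_matrix((np.ones(len(I)), (I, J)), shape=(n, n))
    adj = adj + m + m.T

print(np.sort(adj[1].nonzero()[1]))   # neighbours of vertex 1: [0 2 3]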
+ + :param mesh: the mesh to process + :param want_xyz: if true, takes and returns xyz coordinates, otherwise + takes and returns x *or* y *or* z coordinates + """ + + vpe = get_vertices_per_edge(mesh) + IS = np.repeat(np.arange(len(vpe)), 2) + JS = vpe.flatten() + data = np.ones_like(vpe) + data[:, 1] = -1 + data = data.flatten() + + if want_xyz: + IS = np.concatenate((IS * 3, IS * 3 + 1, IS * 3 + 2)) + JS = np.concatenate((JS * 3, JS * 3 + 1, JS * 3 + 2)) + data = np.concatenate((data, data, data)) + + ij = np.vstack((IS.flatten(), JS.flatten())) + return sp.csc_matrix((data, ij)) + + +def vertices_in_common(face_1, face_2): + """Returns the two vertices shared by two faces, + optimized for the case of triangular faces with two vertices in common""" + if len(face_1) == 3 and len(face_2) == 3: + vertices_in_common = [None, None] + i = 0 + if (face_1[0] == face_2[0]) or (face_1[0] == face_2[1]) or (face_1[0] == face_2[2]): + vertices_in_common[i] = face_1[0] + i += 1 + if (face_1[1] == face_2[0]) or (face_1[1] == face_2[1]) or (face_1[1] == face_2[2]): + vertices_in_common[i] = face_1[1] + i += 1 + if (face_1[2] == face_2[0]) or (face_1[2] == face_2[1]) or (face_1[2] == face_2[2]): + vertices_in_common[i] = face_1[2] + i += 1 + if i == 2: + if vertices_in_common[0] > vertices_in_common[1]: + vertices_in_common = [vertices_in_common[1], vertices_in_common[0]] + return vertices_in_common + elif i < 2: + return [vertices_in_common[0]] if i else [] + else: + return np.intersect1d(face_1, face_2) + + +def get_vertices_per_edge(mesh, faces_per_edge=None): + """Returns an Ex2 array of adjacencies between vertices, where + each element in the array is a vertex index. Each edge is included + only once. If output of get_faces_per_edge is provided, this is used to + avoid call to get_vert_connectivity()""" + + faces = mesh.f + suffix = str(zlib.crc32(faces_per_edge.flatten())) if faces_per_edge is not None else '' + cache_fname = os.path.join(mesh_package_cache_folder, 'verts_per_edge_cache_' + str(zlib.crc32(faces.flatten())) + '_' + suffix + '.pkl') + try: + with open(cache_fname, 'rb') as fp: + return(pickle.load(fp)) + except: + if faces_per_edge is not None: + result = np.asarray(np.vstack([row(np.intersect1d(mesh.f[k[0]], mesh.f[k[1]])) for k in faces_per_edge]), np.uint32) + else: + vc = sp.coo_matrix(get_vert_connectivity(mesh)) + result = np.hstack((col(vc.row), col(vc.col))) + result = result[result[:, 0] < result[:, 1]] # for uniqueness + + with open(cache_fname, 'wb') as fp: + pickle.dump(result, fp, -1) + return result + + # s1 = [set([v[0], v[1]]) for v in mesh.v] + # s2 = [set([v[1], v[2]]) for v in mesh.v] + # s3 = [set([v[2], v[0]]) for v in mesh.v] + # + # return s1+s2+s3 + + +def get_faces_per_edge(mesh): + + faces = mesh.f + cache_fname = os.path.join(mesh_package_cache_folder, 'edgecache_new_' + str(zlib.crc32(faces.flatten())) + '.pkl') + + try: + with open(cache_fname, 'rb') as fp: + return(pickle.load(fp)) + except: + f = faces + IS = np.repeat(np.arange(len(f)), 3) + JS = f.ravel() + data = np.ones(IS.size) + f2v = sp.csc_matrix((data, (IS, JS)), shape=(len(f), np.max(f.ravel()) + 1)) + f2f = f2v.dot(f2v.T) + f2f = f2f.tocoo() + f2f = np.hstack((col(f2f.row), col(f2f.col), col(f2f.data))) + which = (f2f[:, 0] < f2f[:, 1]) & (f2f[:, 2] >= 2) + result = np.asarray(f2f[which, :2], np.uint32) + + with open(cache_fname, 'wb') as fp: + pickle.dump(result, fp, -1) + return result + + +def get_faces_per_edge_old(mesh): + """Returns an Ex2 array of adjacencies between faces, where + 
each element in the array is a face index. Each edge is included + only once. + + Assumes that the mesh's faces are either all CW or all CCW (but not a mix). + """ + + faces = mesh.f + cache_fname = os.path.join(mesh_package_cache_folder, 'edgecache_old_' + str(zlib.crc32(faces.flatten())) + '.pkl') + try: + with open(cache_fname, 'rb') as fp: + return(pickle.load(fp)) + except: + # Raffi: not used + # num_verts = len(mesh.v) + # e1 = sp.csc_matrix((num_verts, num_verts)) + # e2 = sp.csc_matrix((num_verts, num_verts)) + + IS = np.hstack((faces[:, 0], faces[:, 1], faces[:, 2])).T + JS = np.hstack((faces[:, 1], faces[:, 2], faces[:, 0])).T + VS = np.hstack((np.tile(col(np.arange(len(faces))), (3, 1)))).T + VS = VS + 1 # add "1" so that face "0" won't be ignored in sparse arrays + + adj_mtx_csc = sp.csc_matrix((VS, np.vstack((row(IS), row(JS))))) + adj_mtx_coo = sp.coo_matrix((VS, np.vstack((row(IS), row(JS))))) + + edges = [] + for i in xrange(len(adj_mtx_coo.row)): + r = adj_mtx_coo.row[i] + c = adj_mtx_coo.col[i] + if r < c: + edges.append(row(np.array([adj_mtx_csc[c, r], adj_mtx_csc[r, c]]))) + + edges = np.concatenate(edges, axis=0) + edges = edges - 1 # get rid of "1" we added on earlier + with open(cache_fname, 'wb') as fp: + pickle.dump(edges, fp, -1) + + return edges diff --git a/mesh-master/mesh/topology/decimation.py b/mesh-master/mesh/topology/decimation.py new file mode 100644 index 0000000000000000000000000000000000000000..43c4fb5d7d60d6070f3ef6c7500cda6267f4ac1f --- /dev/null +++ b/mesh-master/mesh/topology/decimation.py @@ -0,0 +1,223 @@ +#!/usr/bin/env python +# encoding: utf-8 + +# Copyright (c) 2013 Max Planck Society. All rights reserved. + +import numpy as np +import scipy +import math +import heapq +import scipy.sparse as sp + +from .linear_mesh_transform import LinearMeshTransform + + +def remove_redundant_verts(v, f, eps=1e-10): + """Given verts and faces, this remove colocated vertices and returns + a new 'f' and 'v' that discludes them""" + + fshape = f.shape + + dist_mtx = scipy.spatial.distance.squareform(scipy.spatial.distance.pdist(v)) + redundant = np.asarray(dist_mtx < eps, np.uint32) + + # update faces to not refer to redundant vertices + f = f.flatten() + for i in range(redundant.shape[0]): + which_verts = np.nonzero(redundant[i, :])[0] + if len(which_verts) < 2: + continue + which_facelocs = np.nonzero(np.in1d(f, which_verts))[0] + f[which_facelocs] = np.min(which_verts) + + # get rid of unused verts, and update faces accordingly + vertidxs_left = np.unique(f) + repl = np.arange(np.max(f) + 1) + repl[vertidxs_left] = np.arange(len(vertidxs_left)) + v = v[vertidxs_left] + f = repl[f].reshape((-1, fshape[1])) + + return (v, f) + + +def vertex_quadrics(mesh): + """Computes a quadric for each vertex in the Mesh. + + Returns: + v_quadrics: an (N x 4 x 4) array, where N is # vertices. + """ + + # Allocate quadrics + v_quadrics = np.zeros((len(mesh.v), 4, 4,)) + + # For each face... 
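vertex_quadrics fits a plane to every face by taking the right singular vector of the homogeneous 3x4 vertex matrix with (near-)zero singular value, rescales it so the normal part has unit length, and adds the outer product of that plane equation with itself to the 4x4 quadric of each of the face's vertices (the loop that follows). For a homogeneous point p, p^T Q p is then the squared distance to the plane. A quick standalone check of the plane-fit step, mirroring the construction below:

import numpy as np

tri = np.array([[0.0, 0.0, 0.0],
                [1.0, 0.0, 0.0],
                [0.0, 1.0, 0.0]])               # a face lying in the z = 0 plane

verts_h = np.hstack((tri, np.ones((3, 1))))     # homogeneous 3x4 matrix
_, _, vh = np.linalg.svd(verts_h)
eq = vh[-1, :].reshape(-1, 1)                   # plane equation [a, b, c, d]
eq = eq / np.linalg.norm(eq[0:3])               # unit-length normal part

print(np.round(eq.ravel(), 6))                  # ~[0, 0, 1, 0] up to sign: the z = 0 plane
Q = np.outer(eq, eq)                            # this face's contribution to each vertex quadric
p = np.array([0.3, 0.3, 2.0, 1.0])              # a point two units above the plane
print(float(p @ Q @ p))                         # ~4.0, the squared distance to the plane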
+ for f_idx in range(len(mesh.f)): + + # Compute normalized plane equation for that face + vert_idxs = mesh.f[f_idx] + verts = np.hstack((mesh.v[vert_idxs], np.array([1, 1, 1]).reshape(-1, 1))) + u, s, v = np.linalg.svd(verts) + eq = v[-1, :].reshape(-1, 1) + eq = eq / (np.linalg.norm(eq[0:3])) + + # Add the outer product of the plane equation to the + # quadrics of the vertices for this face + for k in range(3): + v_quadrics[mesh.f[f_idx, k], :, :] += np.outer(eq, eq) + + return v_quadrics + + +def qslim_decimator_fast(mesh, factor=None, n_verts_desired=None): + from experiments.qslim.simplify_ply_nowrite import load_from_ply_qslim + if factor is None: + factor = float(n_verts_desired) / len(mesh.v) + return load_from_ply_qslim(mesh, n_tris=len(mesh.f) * factor, want_optimal=True) + + +def qslim_decimator_transformer(mesh, factor=None, n_verts_desired=None): + """Return a simplified version of this mesh. + + A Qslim-style approach is used here. + + :param factor: fraction of the original vertices to retain + :param n_verts_desired: number of the original vertices to retain + :returns: new_faces: An Fx3 array of faces, mtx: Transformation matrix + """ + + if factor is None and n_verts_desired is None: + raise Exception('Need either factor or n_verts_desired.') + + if n_verts_desired is None: + n_verts_desired = math.ceil(len(mesh.v) * factor) + + Qv = vertex_quadrics(mesh) + + # fill out a sparse matrix indicating vertex-vertex adjacency + from .connectivity import get_vertices_per_edge + vert_adj = get_vertices_per_edge(mesh) + # vert_adj = sp.lil_matrix((len(mesh.v), len(mesh.v))) + # for f_idx in range(len(mesh.f)): + # vert_adj[mesh.f[f_idx], mesh.f[f_idx]] = 1 + + vert_adj = sp.csc_matrix((vert_adj[:, 0] * 0 + 1, (vert_adj[:, 0], vert_adj[:, 1])), shape=(len(mesh.v), len(mesh.v))) + vert_adj = vert_adj + vert_adj.T + vert_adj = vert_adj.tocoo() + + def collapse_cost(Qv, r, c, v): + Qsum = Qv[r, :, :] + Qv[c, :, :] + p1 = np.vstack((v[r].reshape(-1, 1), np.array([1]).reshape(-1, 1))) + p2 = np.vstack((v[c].reshape(-1, 1), np.array([1]).reshape(-1, 1))) + + destroy_c_cost = p1.T.dot(Qsum).dot(p1) + destroy_r_cost = p2.T.dot(Qsum).dot(p2) + result = { + 'destroy_c_cost': destroy_c_cost, + 'destroy_r_cost': destroy_r_cost, + 'collapse_cost': min([destroy_c_cost, destroy_r_cost]), + 'Qsum': Qsum} + return result + + # construct a queue of edges with costs + queue = [] + for k in range(vert_adj.nnz): + r = vert_adj.row[k] + c = vert_adj.col[k] + + if r > c: + continue + + cost = collapse_cost(Qv, r, c, mesh.v)['collapse_cost'] + heapq.heappush(queue, (cost, (r, c))) + + # decimate + collapse_list = [] + nverts_total = len(mesh.v) + faces = mesh.f.copy() + while nverts_total > n_verts_desired: + e = heapq.heappop(queue) + r = e[1][0] + c = e[1][1] + if r == c: + continue + + cost = collapse_cost(Qv, r, c, mesh.v) + if cost['collapse_cost'] > e[0]: + heapq.heappush(queue, (cost['collapse_cost'], e[1])) + # print('found outdated cost, %.2f < %.2f' % (e[0], cost['collapse_cost'])) + continue + else: + + # update old vert idxs to new one, + # in queue and in face list + if cost['destroy_c_cost'] < cost['destroy_r_cost']: + to_destroy = c + to_keep = r + else: + to_destroy = r + to_keep = c + + collapse_list.append([to_keep, to_destroy]) + + # in our face array, replace "to_destroy" vertidx with "to_keep" vertidx + np.place(faces, faces == to_destroy, to_keep) + + # same for queue + which1 = [idx for idx in range(len(queue)) if queue[idx][1][0] == to_destroy] + which2 = [idx for idx in 
range(len(queue)) if queue[idx][1][1] == to_destroy] + for k in which1: + queue[k] = (queue[k][0], (to_keep, queue[k][1][1])) + for k in which2: + queue[k] = (queue[k][0], (queue[k][1][0], to_keep)) + + Qv[r, :, :] = cost['Qsum'] + Qv[c, :, :] = cost['Qsum'] + + a = faces[:, 0] == faces[:, 1] + b = faces[:, 1] == faces[:, 2] + c = faces[:, 2] == faces[:, 0] + + # remove degenerate faces + def logical_or3(x, y, z): + return np.logical_or(x, np.logical_or(y, z)) + + faces_to_keep = np.logical_not(logical_or3(a, b, c)) + faces = faces[faces_to_keep, :].copy() + + nverts_total = (len(np.unique(faces.flatten()))) + + new_faces, mtx = _get_sparse_transform(faces, len(mesh.v)) + return new_faces, mtx + +def qslim_decimator(mesh, factor=None, n_verts_desired=None): + """Return a simplified version of this mesh. + + A Qslim-style approach is used here. + + :param factor: fraction of the original vertices to retain + :param n_verts_desired: number of the original vertices to retain + :returns: An Fx3 array of faces. + """ + new_faces, mtx = qslim_decimator_transformer(mesh, factor, n_verts_desired) + return LinearMeshTransform(mtx, new_faces) + +def _get_sparse_transform(faces, num_original_verts): + verts_left = np.unique(faces.flatten()) + IS = np.arange(len(verts_left)) + JS = verts_left + data = np.ones(len(JS)) + + # + mp = np.arange(0, np.max(faces.flatten()) + 1) + mp[JS] = IS + new_faces = mp[faces.copy().flatten()].reshape((-1, 3)) + + # for x,y,z coords + IS = np.concatenate((IS * 3, IS * 3 + 1, IS * 3 + 2)) + JS = np.concatenate((JS * 3, JS * 3 + 1, JS * 3 + 2)) + data = np.concatenate((data, data, data)) + + ij = np.vstack((IS.flatten(), JS.flatten())) + mtx = sp.csc_matrix((data, ij), shape=(len(verts_left) * 3, num_original_verts * 3)) + + return (new_faces, mtx) diff --git a/mesh-master/mesh/topology/linear_mesh_transform.py b/mesh-master/mesh/topology/linear_mesh_transform.py new file mode 100644 index 0000000000000000000000000000000000000000..5aa649a4a3f14fd2ccc3da9bdf832e7ee94156ac --- /dev/null +++ b/mesh-master/mesh/topology/linear_mesh_transform.py @@ -0,0 +1,75 @@ +#!/usr/bin/env python +# encoding: utf-8 + +# Copyright (c) 2012 Max Planck Society. All rights reserved. +# Created by Matthew Loper on 2012-10-30. + + +import numpy as np + +from ..mesh import Mesh +from ..utils import row, col +from .connectivity import vertices_to_edges_matrix + + +class LinearMeshTransform(object): + def __init__(self, mtx, faces, vt=None, ft=None): + self.mtx = mtx + self.faces = faces + self.remeshed_vtx_to_remeshed_edge_mtx = vertices_to_edges_matrix(Mesh(f=faces, v=np.zeros((mtx.shape[0], 3))), want_xyz=True) + self.vtx_to_edge_mtx = self.remeshed_vtx_to_remeshed_edge_mtx.dot(self.mtx) + if vt is not None: + self.vt = vt + if ft is not None: + self.ft = ft + + def __call__(self, a, want_edges=False): + + if not isinstance(a, Mesh): + return self.chained_obj_for(a, want_edges) + + a_is_subdivided = (a.v.size == self.mtx.shape[0]) + + # if we get here, "a" is a mesh + if want_edges: + if a_is_subdivided: + return self.remeshed_vtx_to_remeshed_edge_mtx.dot(col(a.v)).reshape((-1, 3)) + else: + return self.vtx_to_edge_mtx.dot(col(a.v)).reshape((-1, 3)) + else: + if a_is_subdivided: + return a # nothing to do! 
+ else: + result = Mesh(v=self.mtx.dot(col(a.v)).reshape((-1, 3)), f=self.faces.copy()) + if hasattr(a, 'segm'): + result.transfer_segm(a) + if hasattr(a, 'landm'): + result.landm = dict([(k, np.argmin(np.sum((result.v - row(a.v[v])) ** 2, axis=1))) for k, v in a.landm.items()]) + if hasattr(self, 'ft'): + result.ft = self.ft + if hasattr(self, 'vt'): + result.vt = self.vt + + return result + + def chained_obj_for(self, a, want_edges): + + from ..geometry.vert_normals import MatVecMult + + if hasattr(a, 'r'): + a_len = len(a.r) + else: + a_len = a.size + + a_is_subdivided = a_len == self.mtx.shape[0] + if a_is_subdivided and not want_edges: + return a + + if not want_edges: + mtx = self.mtx + elif a_is_subdivided: + mtx = self.remeshed_vtx_to_remeshed_edge_mtx + else: + mtx = self.vtx_to_edge_mtx + + return MatVecMult(mtx, a) diff --git a/mesh-master/mesh/topology/subdivision.py b/mesh-master/mesh/topology/subdivision.py new file mode 100644 index 0000000000000000000000000000000000000000..fe51d571e3bd26e51910d3ba242d8921c671dabc --- /dev/null +++ b/mesh-master/mesh/topology/subdivision.py @@ -0,0 +1,148 @@ +#!/usr/bin/env python +# encoding: utf-8 + +# Copyright (c) 2012 Max Planck Society. All rights reserved. +# Created by Matthew Loper on 2012-10-28. + +from ..utils import row +from .linear_mesh_transform import LinearMeshTransform +from .connectivity import get_vert_connectivity, get_vertices_per_edge, get_vert_opposites_per_edge + +import numpy as np +import scipy.sparse as sp + + +def loop_subdivider(mesh): + + IS = [] + JS = [] + data = [] + + vc = get_vert_connectivity(mesh) + ve = get_vertices_per_edge(mesh) + vo = get_vert_opposites_per_edge(mesh) + + if hasattr(mesh, 'ft') and hasattr(mesh, 'vt'): + from ..mesh import Mesh + flat_mesh = Mesh(v=mesh.vt, f=mesh.ft) + vt_start = len(flat_mesh.v) + vt_edge_to_midpoint = {} + vt_e = get_vertices_per_edge(flat_mesh) + vt = flat_mesh.v[:, :2].tolist() + for idx, vs in enumerate(vt_e): + vsl = list(vs) + vsl.sort() + vt_edge_to_midpoint[(vsl[0], vsl[1])] = vt_start + idx + vt_edge_to_midpoint[(vsl[1], vsl[0])] = vt_start + idx + vt.append((np.array(vt[vsl[0]]) + np.array(vt[vsl[1]])) / 2.) + vt = np.array(vt) + + if True: + # New values for each vertex + for idx in range(len(mesh.v)): + + # find neighboring vertices + nbrs = np.nonzero(vc[:, idx])[0] + + nn = len(nbrs) + + #if nn <=3: # ==3 might give problems when meshes are not water-tight + if nn == 3: + wt = 3. / 16. + elif nn > 3: + wt = 3. / (8. * nn) + else: + raise Exception('nn should be 3 or more') + for nbr in nbrs: + IS.append(idx) + JS.append(nbr) + data.append(wt) + + JS.append(idx) + IS.append(idx) + data.append(1. - (wt * nn)) + + start = len(mesh.v) + edge_to_midpoint = {} + + if True: + # New values for each edge: + # new edge verts depend on the verts they span + for idx, vs in enumerate(ve): + + vsl = list(vs) + vsl.sort() + IS.append(start + idx) + IS.append(start + idx) + JS.append(vsl[0]) + JS.append(vsl[1]) + data.append(3. / 8) + data.append(3. / 8) + + opposites = vo[(vsl[0], vsl[1])] + IS.append(start + idx) + IS.append(start + idx) + JS.append(opposites[0]) + JS.append(opposites[1]) + data.append(1. / 8) + data.append(1. 
/ 8) + + edge_to_midpoint[(vsl[0], vsl[1])] = start + idx + edge_to_midpoint[(vsl[1], vsl[0])] = start + idx + + f = [] + if hasattr(mesh, 'ft'): + ft = [] + + for f_i, old_f in enumerate(mesh.f): + ff = np.concatenate((old_f, old_f)) + ftft = np.concatenate((mesh.ft[f_i], mesh.ft[f_i])) if hasattr(mesh, 'ft') else [] + + for i in range(3): + v0 = edge_to_midpoint[(ff[i], ff[i + 1])] + v1 = ff[i + 1] + v2 = edge_to_midpoint[(ff[i + 1], ff[i + 2])] + f.append(row(np.array([v0, v1, v2]))) + if len(ftft): + if len(np.unique(mesh.ft[f_i])) != len(mesh.ft[f_i]): + # anomalous face + ft.append(row(np.array([0, 0, 0]))) + else: + e_v0 = vt_edge_to_midpoint[(ftft[i], ftft[i + 1])] + e_v1 = ftft[i + 1] + e_v2 = vt_edge_to_midpoint[(ftft[i + 1], ftft[i + 2])] + ft.append(row(np.array([e_v0, e_v1, e_v2]))) + + v0 = edge_to_midpoint[(ff[0], ff[1])] + v1 = edge_to_midpoint[(ff[1], ff[2])] + v2 = edge_to_midpoint[(ff[2], ff[3])] + f.append(row(np.array([v0, v1, v2]))) + if len(ftft): + if len(np.unique(mesh.ft[f_i])) != len(mesh.ft[f_i]): + # anomalous face + ft.append(row(np.array([0, 0, 0]))) + else: + e_v0 = vt_edge_to_midpoint[(ftft[0], ftft[1])] + e_v1 = vt_edge_to_midpoint[(ftft[1], ftft[2])] + e_v2 = vt_edge_to_midpoint[(ftft[2], ftft[3])] + ft.append(row(np.array([e_v0, e_v1, e_v2]))) + + f = np.vstack(f) + if hasattr(mesh, 'ft'): + ft = np.vstack(ft) + + IS = np.array(IS, dtype=np.uint32) + JS = np.array(JS, dtype=np.uint32) + + if True: # for x,y,z coords + IS = np.concatenate((IS * 3, IS * 3 + 1, IS * 3 + 2)) + JS = np.concatenate((JS * 3, JS * 3 + 1, JS * 3 + 2)) + data = np.concatenate((data, data, data)) + + ij = np.vstack((IS.flatten(), JS.flatten())) + mtx = sp.csc_matrix((data, ij)) + + if hasattr(mesh, 'ft'): + return LinearMeshTransform(mtx, f, vt=vt, ft=ft) + else: + return LinearMeshTransform(mtx, f) diff --git a/mesh-master/mesh/utils.py b/mesh-master/mesh/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..054c8e0cae78da13da1346034d4fc42af8cb4c7e --- /dev/null +++ b/mesh-master/mesh/utils.py @@ -0,0 +1,22 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Copyright (c) 2013 Max Planck Society. All rights reserved. + + +def row(A): + return A.reshape((1, -1)) + + +def col(A): + return A.reshape((-1, 1)) + + +def sparse(i, j, data, m=None, n=None): + import numpy as np + from scipy.sparse import csc_matrix + ij = np.vstack((i.flatten().reshape(1, -1), j.flatten().reshape(1, -1))) + + if m is None: + return csc_matrix((data, ij)) + else: + return csc_matrix((data, ij), shape=(m, n)) diff --git a/mesh-master/mesh/version.py b/mesh-master/mesh/version.py new file mode 100644 index 0000000000000000000000000000000000000000..cdabec692e05f7aff2473e63063909538119fc6d --- /dev/null +++ b/mesh-master/mesh/version.py @@ -0,0 +1,6 @@ +#!/usr/bin/env python +# encoding: utf-8 + +# Copyright (c) 2016 Max Planck Society. All rights reserved. 
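loop_subdivider above returns a LinearMeshTransform (defined earlier in this diff): its sparse matrix maps the original flattened xyz coordinates to the subdivided ones, and calling the transform on a Mesh yields the subdivided mesh directly. A minimal usage sketch on a closed tetrahedron, which satisfies the assumptions of the rules above (every vertex has at least three neighbours and every edge has two opposite vertices); import paths assume the package is installed as psbody.mesh per the setup.py below.

import numpy as np
from psbody.mesh.mesh import Mesh
from psbody.mesh.topology.subdivision import loop_subdivider

v = np.array([[0.0, 0.0, 0.0],
              [1.0, 0.0, 0.0],
              [0.0, 1.0, 0.0],
              [0.0, 0.0, 1.0]])
f = np.array([[0, 2, 1], [0, 1, 3], [0, 3, 2], [1, 2, 3]])
m = Mesh(v=v, f=f)

xform = loop_subdivider(m)            # a LinearMeshTransform
hi = xform(m)                         # subdivided mesh: 4 + 6 = 10 verts, 4 * 4 = 16 faces
print(hi.v.shape, hi.f.shape)

# the same vertices via the sparse matrix directly
hi_v = xform.mtx.dot(m.v.reshape((-1, 1))).reshape((-1, 3))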
+ +__version__ = '0.4' diff --git a/mesh-master/psbody-mesh-namespace/__init__.py b/mesh-master/psbody-mesh-namespace/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..7fa8a286869b5c6665816329df9c01a30ac604e5 --- /dev/null +++ b/mesh-master/psbody-mesh-namespace/__init__.py @@ -0,0 +1,6 @@ +# this is the setup tools way +__import__('pkg_resources').declare_namespace(__name__) + +# this is the distutils way, but does not work with setuptools +#from pkgutil import extend_path +#__path__ = extend_path(__path__, __name__) diff --git a/mesh-master/requirements.txt b/mesh-master/requirements.txt new file mode 100644 index 0000000000000000000000000000000000000000..1759429447928cfcb2e81f4ff181c32d51325831 --- /dev/null +++ b/mesh-master/requirements.txt @@ -0,0 +1,9 @@ +setuptools +numpy +matplotlib +scipy +pyopengl +pillow +pyzmq +pyyaml +opencv-python diff --git a/mesh-master/setup.py b/mesh-master/setup.py new file mode 100644 index 0000000000000000000000000000000000000000..0da199ea794d80e029c9d02fc770eb704d403fb1 --- /dev/null +++ b/mesh-master/setup.py @@ -0,0 +1,281 @@ +# (c) 2015-2016 Max Planck Society +# see accompanying LICENSE.txt file for licensing and contact information + +try: + # setuptools is required + from setuptools import setup, Extension as _Extension + from setuptools.command.build_ext import build_ext as _build_ext + from setuptools.command.install import install as _install + + has_setup_tools = True +except ImportError: + from distutils.core import setup, Extension as _Extension + from distutils.command.build_ext import install as _install + + has_setup_tools = False + +from distutils.util import convert_path +from distutils.core import Command +from distutils import log +from distutils.command.sdist import sdist as _sdist + +import os + +# this package will go to the following namespace +namespace_package = 'psbody' + +# the CGAL archive +CGAL_archive = convert_path('mesh/thirdparty/CGAL-4.7.tar.gz') + + +def _get_version(): + """Convenient function returning the version of this package""" + + ns = {} + version_path = convert_path('mesh/version.py') + if not os.path.exists(version_path): + return None + with open(version_path) as version_file: + exec(version_file.read(), ns) + + log.warn('[VERSION] read version is %s', ns['__version__']) + return ns['__version__'] + + +class build_deflate_cgal(Command): + """Deflates CGal to a temporary build folder""" + + description = "deflate CGAL" + # option with '=' because it takes an argument + user_options = [('cgal-location=', None, 'specifies the location of the cgal archive (tar.gz file)'), + ] + + def initialize_options(self): + self.build_temp = None + + def finalize_options(self): + self.set_undefined_options('build', ('build_temp', 'build_temp'),) + pass + + def run(self): + + CGAL_dir_deflate = os.path.abspath(self.build_temp) + + log.info('[CGAL] deflating cgal from "%s" to "%s"', CGAL_archive, CGAL_dir_deflate) + if not os.path.exists(os.path.join(CGAL_dir_deflate, 'CGAL-4.7')): + import tarfile + os.makedirs(CGAL_dir_deflate) + + cgal_tar = tarfile.open(CGAL_archive, 'r:*') + cgal_tar.extractall(CGAL_dir_deflate) + + # create a dummy configuration file + config_file = os.path.join(CGAL_dir_deflate, 'CGAL-4.7', 'include', 'CGAL', 'compiler_config.h') + if not os.path.exists(config_file): + open(config_file, 'w') + + pass + + +class build_ext(_build_ext): + """We override the regular extension processing to add our own dependencies""" + + user_options = [('boost-location=', None, 'specifies 
the location of the boost folder (only include needed)'), + ] + _build_ext.user_options + + def initialize_options(self): + self.boost_location = None + return _build_ext.initialize_options(self) + + def finalize_options(self): + + self.set_undefined_options('install', ('boost_location', 'boost_location'),) + if self.boost_location is not None and self.boost_location.strip(): + # avoid empty folder name as it may happen and mess with the compiler + # + # we cannot assert that boost_location exist here, because we are + # running this code for targets that do not require compilation + # such as sdist + + # check for subfolders in the boost-x-yy-z sense + # check for env variables + self.boost_location = os.path.expanduser(self.boost_location) + + return _build_ext.finalize_options(self) + + def build_extension(self, ext): + """Adds the necessary include folders""" + + # should be possible to have boost on the system + # assert(self.boost_location is not None), 'the boost location should be provided with the option "--boost-location"' + + ext.include_dirs += [os.path.join(os.path.abspath(self.build_temp), 'CGAL-4.7', 'include')] + if self.boost_location is not None: + ext.include_dirs += [self.boost_location] + + # Remove empty paths + filtered = [] + for in_dir in filter(None, ext.include_dirs): + filtered.append(in_dir) + ext.include_dirs = filtered + + return _build_ext.build_extension(self, ext) + + def run(self): + """Runs the dependant targets""" + # the 1 at the end construct the object always, even if not specified on + # the command line. + build_deflate_cgal = self.get_finalized_command('build_deflate_cgal', 1) + build_deflate_cgal.run() + + return _build_ext.run(self) + + # see subcommands documentation in the original Command class + sub_commands = [('build_deflate_cgal', None)] + _build_ext.sub_commands + + +class install(_install): + """We override the regular extension processing to add our own dependencies""" + + user_options = [('boost-location=', None, 'specifies the location of the boost folder (only include needed)'), + ] + _install.user_options + + def initialize_options(self): + self.boost_location = None + return _install.initialize_options(self) + + def finalize_options(self): + + # if self.boost_location is not None: + # self.boost_location = os.path.expanduser(self.boost_location) + + return _install.finalize_options(self) + + +class sdist(_sdist): + """Modified source distribution that adds the CGAL distribution to the generated package""" + + def get_file_list(self): + """Extends the file list read from the manifest with the sources of Yayi""" + + _sdist.get_file_list(self) + + # including the CGal archive without being forced to use the Manifest + self.filelist.append(CGAL_archive) + + # distributing the tests files without being forced to use the Manifest + for i in os.listdir(convert_path('tests')): + if os.path.splitext(i)[1] == ".py": + self.filelist.append(convert_path(os.path.join('tests', i))) + + log.info('[SDIST] file list is:') + for f in self.filelist.files: + log.info('[SDIST] \t"%s"', f) + + return + + +def _get_all_extensions(): + try: + import numpy + except: + return [] + + # valid only for gcc/clang + extra_args = ['-O3'] + + import sys + if sys.platform.find('linux') > -1: + extra_args += ['-fopenmp'] # openmp not supported on OSX + + define_macros = [('NDEBUG', '1')] + + define_macros_mesh_ext_without_cgal_link = [ + ('CGAL_NDEBUG', 1), + ('MESH_CGAL_AVOID_COMPILED_VERSION', 1), + ('CGAL_HAS_NO_THREADS', 1), + ('CGAL_NO_AUTOLINK_CGAL', 1) + ] + 
+ undef_macros = [] + + package_name_and_srcs = [('aabb_normals', ['mesh/src/aabb_normals.cpp'], define_macros_mesh_ext_without_cgal_link), + ('spatialsearch', ['mesh/src/spatialsearchmodule.cpp'], define_macros_mesh_ext_without_cgal_link), + ('visibility', ['mesh/src/py_visibility.cpp', 'mesh/src/visibility.cpp'], define_macros_mesh_ext_without_cgal_link), + ('serialization.plyutils', ['mesh/src/plyutils.c', 'mesh/src/rply.c'], []), + ('serialization.loadobj', ['mesh/src/py_loadobj.cpp'], []), + ] + + out = [] + + for current_package_name, src_list, additional_defines in package_name_and_srcs: + ext = _Extension("%s.mesh.%s" % (namespace_package, current_package_name), + src_list, + language="c++", + include_dirs=['mesh/src', numpy.get_include()], + libraries=[], + define_macros=define_macros + additional_defines, + undef_macros=undef_macros, + extra_compile_args=extra_args, + extra_link_args=extra_args) + + out += [ext] + + return out + +all_extensions = _get_all_extensions() + +additional_kwargs = {} +if has_setup_tools: + # setup tools required for the 'setup_requires' ... + additional_kwargs['setup_requires'] = ['setuptools', 'numpy'] + additional_kwargs['install_requires'] = [ + 'numpy >= 1.8', + 'opencv-python', + 'pillow', + 'pyopengl', + 'pyyaml', + 'pyzmq', + 'scipy', + ] + additional_kwargs['zip_safe'] = not all_extensions + additional_kwargs['test_suite'] = "tests" + additional_kwargs['namespace_packages'] = [namespace_package] + +cmdclass = {'build_ext': build_ext, + 'build_deflate_cgal': build_deflate_cgal, + 'sdist': sdist, + 'install': install} + +# check if the namespace works for python >= 3.3 +packages = [namespace_package, + '%s.mesh' % namespace_package, + '%s.mesh.topology' % namespace_package, + '%s.mesh.geometry' % namespace_package, + '%s.mesh.serialization' % namespace_package + ] # actual subpackage described here + +package_dir = {namespace_package: '%s-mesh-namespace' % namespace_package, + '%s.mesh' % namespace_package: 'mesh', # actual subpackage described here + '%s.mesh.topology' % namespace_package: 'mesh/topology', + '%s.mesh.geometry' % namespace_package: 'mesh/geometry', + '%s.mesh.serialization' % namespace_package: 'mesh/serialization', + } + +setup(name='%s-mesh' % namespace_package, + version=_get_version(), + packages=packages, + package_dir=package_dir, + ext_modules=all_extensions, + author='Max Planck Perceiving Systems - Body Group', + maintainer='Jean-Claude Passy', + maintainer_email='jean-claude.passy@tuebingen.mpg.de', + url='http://ps.is.tuebingen.mpg.de', + description='Mesh and MeshViewer utilities', + license='See LICENSE.txt', + cmdclass=cmdclass, + scripts=[ + "bin/meshviewer" + ], + ** additional_kwargs + ) diff --git a/mesh-master/tests/__init__.py b/mesh-master/tests/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..63e6fd338284ae564cfd6f14da7bf855700574d2 --- /dev/null +++ b/mesh-master/tests/__init__.py @@ -0,0 +1,12 @@ +#!/usr/bin/env python +# encoding: utf-8 + +# Copyright (c) 2013 Max Planck Society. All rights reserved. 
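The package_dir mapping above is what turns the repository folders into the psbody.mesh namespace that the unit tests below import from. A quick smoke-test sketch, assuming the extensions compiled successfully:

import numpy as np
from psbody.mesh.mesh import Mesh
from psbody.mesh.topology.connectivity import get_vertices_per_edge

# the box test mesh used by test_mesh.py further down
box_v = np.array([[0.5, 0.5, 0.5], [-0.5, 0.5, 0.5], [0.5, -0.5, 0.5], [-0.5, -0.5, 0.5],
                  [0.5, 0.5, -0.5], [-0.5, 0.5, -0.5], [0.5, -0.5, -0.5], [-0.5, -0.5, -0.5]])
box_f = np.array([[0, 1, 2], [3, 2, 1], [0, 2, 4], [6, 4, 2], [0, 4, 1], [5, 1, 4],
                  [7, 5, 6], [4, 6, 5], [7, 6, 3], [2, 3, 6], [7, 3, 5], [1, 5, 3]])

m = Mesh(v=box_v, f=box_f)
print(get_vertices_per_edge(m).shape)   # (18, 2): a triangulated box has 18 edges
tree = m.compute_aabb_tree()            # wrapper around the compiled spatialsearch extension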
+ +import tempfile +from os.path import abspath, dirname, join + +test_data_folder = abspath(join(dirname(__file__), '..', 'data', 'unittest')) + +# folder used for creating temporary files +temporary_files_folder = tempfile.gettempdir() diff --git a/mesh-master/tests/test_aabb_n_tree.py b/mesh-master/tests/test_aabb_n_tree.py new file mode 100644 index 0000000000000000000000000000000000000000..62c620a139d9ff968569ecb8e7a3aa5322714924 --- /dev/null +++ b/mesh-master/tests/test_aabb_n_tree.py @@ -0,0 +1,89 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Copyright (c) 2014 Max Planck Society. All rights reserved. + +import os +import numpy as np +import unittest + +from . import test_data_folder +from psbody.mesh.mesh import Mesh +from psbody.mesh.geometry.tri_normals import TriToScaledNormal, NormalizeRows +import psbody.mesh.aabb_normals as aabb_normals + + +class TestAABBNormal(unittest.TestCase): + + def setUp(self): + simpleobjpath = os.path.join(test_data_folder, 'test_doublebox.obj') + self.simple_m = Mesh(filename=simpleobjpath) + cylinderpath = os.path.join(test_data_folder, 'cylinder.obj') + self.cylinder_m = Mesh(filename=cylinderpath) + cylinder_trans_path = os.path.join(test_data_folder, 'cylinder_trans.obj') + self.cylinder_trans_m = Mesh(filename=cylinder_trans_path) + self_int_cyl_path = os.path.join(test_data_folder, 'self_intersecting_cyl.obj') + self.self_int_cyl_m = Mesh(filename=self_int_cyl_path) + + # error_p = ||p - q|| + eps*(1 - p_n*p_q) + # therefore, eps=0 should give the classic NN + def test_dist_classic(self): + tree_handle = aabb_normals.aabbtree_n_compute(self.simple_m.v, + self.simple_m.f.astype(np.uint32).copy(), + 0.0) + query_v = np.array([[0.5, 0.1, 0.25], + [0.5, 0.1, 0.25]]) + query_n = np.array([[0.0, 1.0, 0.0], + [1.0, 0.0, 0.0]]) + closest_tri, closest_p = aabb_normals.aabbtree_n_nearest(tree_handle, query_v, query_n) + self.assertTrue((closest_tri == np.array([[0, 0]])).all()) + self.assertTrue((closest_p == query_v).all()) + + def test_dist_normals(self): + tree_handle = aabb_normals.aabbtree_n_compute(self.simple_m.v, + self.simple_m.f.astype(np.uint32).copy(), + 0.5) + query_v = np.array([[0.5, 0.1, 0.25], + [0.5, 0.1, 0.25]]) + query_n = np.array([[0.0, 1.0, 0.0], + [1.0, 0.0, 0.0]]) + closest_tri, closest_p = aabb_normals.aabbtree_n_nearest(tree_handle, query_v, query_n) + self.assertTrue((closest_tri == np.array([[2, 0]])).all()) + self.assertTrue((closest_p == np.array([[0.5, 0.5, 0.25], + [0.5, 0.1, 0.25]])).all()) + + def test_cylinders(self): + create_tree = lambda eps: aabb_normals.aabbtree_n_compute(self.cylinder_m.v, + self.cylinder_m.f.astype(np.uint32).copy(), + eps) + tree_handle_no_normals = create_tree(0) + tree_handle_normals = create_tree(10) + + query_v = self.cylinder_trans_m.v + + tri_n = NormalizeRows(TriToScaledNormal(self.cylinder_trans_m.v, self.cylinder_trans_m.f)) + + query_n = np.zeros(self.cylinder_trans_m.v.shape) + for i_f in range(self.cylinder_trans_m.f.shape[0]): + query_n[self.cylinder_trans_m.f[i_f, :], :] += tri_n[i_f, :] + query_n = NormalizeRows(query_n) + + closest_tri, _ = aabb_normals.aabbtree_n_nearest(tree_handle_no_normals, query_v, query_n) + # all closest triangles are the two extremes + self.assertTrue(np.unique(closest_tri).shape[0] <= 4) + + closest_tri_n, _ = aabb_normals.aabbtree_n_nearest(tree_handle_normals, query_v, query_n) + # there are four triangles that do not need to be reached, in the center and in the extremes + self.assertTrue(np.unique(closest_tri_n).shape[0] >= 
(self.cylinder_m.f.shape[0] - 4)) + + def test_selfintersects(self): + tree_handle_no = aabb_normals.aabbtree_n_compute(self.simple_m.v, + self.simple_m.f.astype(np.uint32).copy(), + 0.5) + + self.assertTrue(aabb_normals.aabbtree_n_selfintersects(tree_handle_no) == 0) + + tree_handle_yes = aabb_normals.aabbtree_n_compute(self.self_int_cyl_m.v, + self.self_int_cyl_m.f.astype(np.uint32).copy(), + 0.5) + + self.assertTrue(aabb_normals.aabbtree_n_selfintersects(tree_handle_yes) == (2 * 8)) diff --git a/mesh-master/tests/test_arcball.py b/mesh-master/tests/test_arcball.py new file mode 100644 index 0000000000000000000000000000000000000000..3e533e66c14ed17cc51168787db72bdd10049cae --- /dev/null +++ b/mesh-master/tests/test_arcball.py @@ -0,0 +1,74 @@ + +import unittest +import copy +import numpy as np + +from psbody.mesh.arcball import Matrix3fT, Matrix4fT, ArcBallT, \ + Point2fT, Matrix3fSetRotationFromQuat4f, Matrix3fMulMatrix3f, Matrix4fSetRotationFromMatrix3f +from numpy import double + + +class TestArcball(unittest.TestCase): + + def test_arcball(self): + # Unit testing of the ArcBall class and the real math behind it. + # Simulates a click and drag followed by another click and drag. + + Transform = Matrix4fT() + ThisRot = Matrix3fT() + + ArcBall = ArcBallT(640, 480) + + # First click + LastRot = copy.copy(ThisRot) + mouse_pt = Point2fT(500, 250) + ArcBall.click(mouse_pt) + + # First drag + mouse_pt = Point2fT(475, 275) + ThisQuat = ArcBall.drag(mouse_pt) + np.testing.assert_almost_equal(ThisQuat, [0.08438914, -0.08534209, -0.06240178, 0.99080837]) + + # + ThisRot = Matrix3fSetRotationFromQuat4f(ThisQuat) + # Linear Algebra matrix multiplication A = old, B = New : C = A * B + ThisRot = Matrix3fMulMatrix3f(LastRot, ThisRot) + Transform = Matrix4fSetRotationFromMatrix3f(Transform, ThisRot) + + first_transform_gt = np.array([[0.97764552, -0.1380603, 0.15858325, 0.], + [0.10925253, 0.97796899, 0.17787792, 0.], + [-0.17964739, -0.15657592, 0.97119039, 0.], + [0., 0., 0., 1., ]], + dtype=np.double) + np.testing.assert_almost_equal(Transform, first_transform_gt) + + # Done with first drag + + # second click + LastRot = copy.copy(ThisRot) + lastrot_gt = np.array([[0.97764552, -0.1380603, 0.15858325], + [0.10925253, 0.97796899, 0.17787792], + [-0.17964739, -0.15657592, 0.97119039]], + dtype=double) + np.testing.assert_almost_equal(LastRot, lastrot_gt) + + mouse_pt = Point2fT(350, 260) + ArcBall.click(mouse_pt) + # second drag + mouse_pt = Point2fT(450, 260) + ThisQuat = ArcBall.drag(mouse_pt) + + np.testing.assert_almost_equal(ThisQuat, [0.00710336, 0.31832787, 0.02679029, 0.94757545]) + + ThisRot = Matrix3fSetRotationFromQuat4f(ThisQuat) + ThisRot = Matrix3fMulMatrix3f(LastRot, ThisRot) + # print ThisRot + Transform = Matrix4fSetRotationFromMatrix3f(Transform, ThisRot) + + second_transform_gt = np.array([[0.88022292, -0.08322023, -0.46720669, 0.], + [0.14910145, 0.98314685, 0.10578787, 0.], + [0.45052907, -0.16277808, 0.8777966, 0.], + [0., 0., 0., 1.00000001]], + dtype=double) + + np.testing.assert_almost_equal(Transform, second_transform_gt) diff --git a/mesh-master/tests/test_geometry.py b/mesh-master/tests/test_geometry.py new file mode 100644 index 0000000000000000000000000000000000000000..dba77bba87a2c9a7acf52d3404b9b2840ecced16 --- /dev/null +++ b/mesh-master/tests/test_geometry.py @@ -0,0 +1,145 @@ +#!/usr/bin/env python +# encoding: utf-8 + +# Copyright (c) 2012 Max Planck Society. All rights reserved. + +import os + +import numpy as np +import unittest + +from . 
import test_data_folder +from unittest.case import skipUnless + +try: + import cv2 + has_cv2 = True +except ImportError: + has_cv2 = False + + +class TestGeometry(unittest.TestCase): + + @skipUnless(has_cv2, 'skipping tests requiring OpenCV') + def test_rodrigues(self): + from psbody.mesh.geometry.rodrigues import rodrigues + + test_data = ( + np.array([0, 0, 0], dtype=np.double), + np.array([1, -1, 0.5], dtype=np.double), + np.array([[1, -1, 0.5]], dtype=np.double), + np.array([[1, -1, 0.5]], dtype=np.double).T, + np.eye(3, dtype=np.double), + np.array([[1, 0, 0], [0, -1, 0], [0, 0, -1]], dtype=np.double), + np.array([[0.22629564, 0.95671228, -0.18300792], + [-0.18300792, 0.22629564, 0.95671228], + [0.95671228, -0.18300792, 0.22629564]], dtype=np.double), + ) + for r_in in test_data: + true_r, true_dr = cv2.Rodrigues(r_in) + our_r, our_dr = rodrigues(r_in) + np.testing.assert_array_almost_equal(our_r, true_r, verbose=True) + np.testing.assert_array_almost_equal(our_dr, true_dr, verbose=True) + + def test_cross_product(self): + from psbody.mesh.geometry.cross_product import CrossProduct + + npts = 6 + a = np.random.randn(3 * npts) + b = np.random.randn(3 * npts) + + c = CrossProduct(a, b) + + # this should be close to (or exactly) zero + our_answer = c.flatten() + numpy_answer = np.cross(a.reshape(-1, 3), b.reshape(-1, 3)).flatten() + + self.assertTrue(max(abs(our_answer - numpy_answer)) < 1e-15) + + def test_vert_normals(self): + from psbody.mesh.geometry.vert_normals import VertNormals + from psbody.mesh.mesh import Mesh + mesh = Mesh(filename=os.path.join(test_data_folder, 'sphere.ply')) + pred = VertNormals(mesh.v, mesh.f) + + vn_obs = mesh.estimate_vertex_normals().reshape((-1, 3)) + vn_pred = pred.reshape((-1, 3)) + + self.assertTrue(np.max(np.abs(vn_pred.flatten() - vn_obs.flatten())) < 1e-15) + + def test_barycentric_coordinates_of_projection(self): + """Tests backwards compatibility with old matlab + function of the same name.""" + from psbody.mesh.geometry.barycentric_coordinates_of_projection import barycentric_coordinates_of_projection + + p = np.array([[-120, 48, -30, 88, -80], + [71, 102, 29, -114, -291], + [161, 72, -78, -106, 142]]).T + + q = np.array([[32, -169, 32, -3, 108], + [-75, -10, 31, -16, 110], + [136, -24, -86, 62, -86]]).T + + u = np.array([[8, -1, 37, -108, 109], + [-120, 152, -22, 3, 153], + [-110, -76, 111, 55, 9]]).T + + v = np.array([[-148, 233, -19, -139, -18], + [-73, -61, 88, -141, -19], + [-105, 74, -76, 48, 141]]).T + + b = np.array([[1.5266, -0.8601, 1.3245, 2.4450, 1.3452], + [-1.5346, 0.8556, -0.1963, -2.1865, -2.0794], + [1.0080, 1.0046, -0.1282, 0.7415, 1.7342]]).T + + b_est = barycentric_coordinates_of_projection(p, q, u, v) + self.assertTrue(np.max(np.abs(b_est.flatten('F') - b.flatten('F'))) < 1e-3) + + p = p[0, :] + q = q[0, :] + u = u[0, :] + v = v[0, :] + b = b[0, :] + + b_est = barycentric_coordinates_of_projection(p, q, u, v) + self.assertTrue(np.max(np.abs(b_est.flatten('F') - b.flatten('F'))) < 1e-3) + + @unittest.skipIf( + not os.path.isfile(os.path.join(test_data_folder, 'female_template.ply')), + 'No data file.') + def test_trinormal(self): + + from psbody.mesh.mesh import Mesh + from psbody.mesh.geometry.tri_normals import TriNormals, TriToScaledNormal, TriNormalsScaled, NormalizeRows + + m = Mesh(filename=os.path.join(test_data_folder, 'female_template.ply')) + + # Raffi: I do not know what this thing is supposed to test, maybe stability over some noise... 
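The barycentric test above checks the returned coefficients against values from the old MATLAB implementation; note that each row of the expected output sums to one, consistent with reading the result as barycentric coordinates of the projection of p onto the plane of the triangle (q, q+u, q+v). A small numeric sketch of that reading (my interpretation, not taken verbatim from the source):

import numpy as np
from psbody.mesh.geometry.barycentric_coordinates_of_projection import \
    barycentric_coordinates_of_projection

q = np.array([[0.0, 0.0, 0.0]])       # triangle corner
u = np.array([[1.0, 0.0, 0.0]])       # edge vector to the corner q + u
v = np.array([[0.0, 1.0, 0.0]])       # edge vector to the corner q + v
p = np.array([[0.25, 0.25, 5.0]])     # a point well above the triangle's plane

b = barycentric_coordinates_of_projection(p, q, u, v)
proj = b[:, 0:1] * q + b[:, 1:2] * (q + u) + b[:, 2:3] * (q + v)
print(b)      # ~[[0.5, 0.25, 0.25]], rows sum to one
print(proj)   # ~[[0.25, 0.25, 0.0]]: p projected onto the z = 0 plane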
+ tn = TriNormals(m.v, m.f) + tn2 = NormalizeRows(TriToScaledNormal(m.v, m.f)) + + eps = 1e-8 + mvc = m.v.copy() + mvc[0] += eps + tn_b = TriNormals(mvc, m.f) + tn2_b = NormalizeRows(TriToScaledNormal(mvc, m.f)) + # our TriNormals empirical: sp.csc_matrix(tn_b.flatten() - tn.flatten()) / eps + # old TriToScaledNormal empirical: sp.csc_matrix(tn2_b.flatten() - tn2.flatten()) / eps + + # apparently just for printing sparsely + # import scipy.sparse as sp + # print sp.csc_matrix(tn_b.flatten() - tn.flatten()) / eps + np.testing.assert_almost_equal(tn_b.flatten() - tn.flatten(), + tn2_b.flatten() - tn2.flatten()) + + tn = TriNormalsScaled(m.v, m.f) + tn2 = TriToScaledNormal(m.v, m.f) + eps = 1e-8 + mvc = m.v.copy() + mvc[0] += eps + + tn_b = TriNormalsScaled(mvc, m.f) + tn2_b = TriToScaledNormal(mvc, m.f) + + np.testing.assert_almost_equal(tn_b.flatten() - tn.flatten(), + tn2_b.flatten() - tn2.flatten()) diff --git a/mesh-master/tests/test_intersections.py b/mesh-master/tests/test_intersections.py new file mode 100644 index 0000000000000000000000000000000000000000..a5c35ab0ef1a60cf777ebad13f2124fea43ce12e --- /dev/null +++ b/mesh-master/tests/test_intersections.py @@ -0,0 +1,35 @@ +""" +Unit Tests +---------- + +Unit test for the mesh_intersections module. +The result is the list of indices of the intersecting faces + +""" +import unittest + + +class TestMeshIntersection(unittest.TestCase): + + def test_spheres_intersection(self): + # deactivate test temporarily + pass + + ''' + from psbody.mesh.sphere import Sphere + qm = Sphere(np.asarray([-1, 0, 0]), 2).to_mesh() + m = Sphere(np.asarray([1, 0, 0]), 2).to_mesh() + + t = m.compute_aabb_tree() + + faces_index = t.intersections_indices(qm.v, qm.f) + + ref_faces_index = [2, 4, 5, 6, 16, 25, 26, 27, 36, 37, 38, 40, 58, 60, 61, 63, 76, 77, 79] + + test = True + for i in range(len(faces_index)): + if faces_index[i] != ref_faces_index[i]: + test = False + + self.assertTrue(test) + ''' diff --git a/mesh-master/tests/test_mesh.py b/mesh-master/tests/test_mesh.py new file mode 100644 index 0000000000000000000000000000000000000000..022cda24a954d5922bcab913a1c6ea185857f799 --- /dev/null +++ b/mesh-master/tests/test_mesh.py @@ -0,0 +1,180 @@ +import unittest +import numpy as np +import tempfile +import os +import shutil +from os.path import join as pjoin + +from psbody.mesh.mesh import Mesh +from psbody.mesh.errors import MeshError, SerializationError + +from .unittest_extensions import ExtendedTest + +from .
import test_data_folder + + +class TestMesh(ExtendedTest): + + def setUp(self): + self.box_v = np.array([[0.5, -0.5, 0.5, -0.5, 0.5, -0.5, 0.5, -0.5], [0.5, 0.5, -0.5, -0.5, 0.5, 0.5, -0.5, -0.5], [0.5, 0.5, 0.5, 0.5, -0.5, -0.5, -0.5, -0.5]]).T + self.box_f = np.array([[0, 1, 2], [3, 2, 1], [0, 2, 4], [6, 4, 2], [0, 4, 1], [5, 1, 4], [7, 5, 6], [4, 6, 5], [7, 6, 3], [2, 3, 6], [7, 3, 5], [1, 5, 3]]) + self.box_segm = {'a': np.array(range(6), dtype=np.uint32), + 'b': np.array([6, 10, 11], dtype=np.uint32), + 'c': np.array([7, 8, 9], dtype=np.uint32)} + self.landm = {'pospospos': 0, + 'negnegneg': 7} + self.landm_xyz = {'pospospos': np.array([0.5, 0.5, 0.5]), + 'negnegneg': np.array([-0.5, -0.5, -0.5])} + self.test_obj_path = pjoin(test_data_folder, "test_box.obj") + self.test_ply_path = pjoin(test_data_folder, "test_box.ply") + self.test_bin_ply_path = pjoin(test_data_folder, "test_box_le.ply") + self.test_bad_ply_path = pjoin(test_data_folder, "test_ascii_bad_endings.ply") + self.test_pp_path = pjoin(test_data_folder, "test_box.pp") + self.test_sphere_path = pjoin(test_data_folder, "sphere.ply") + + def test_load_obj(self): + m = Mesh(filename=self.test_obj_path) + self.assertTrue((m.v == self.box_v).all()) + self.assertTrue((m.f == self.box_f).all()) + self.assertDictOfArraysEqual(m.segm, self.box_segm) + self.assertEqual(m.landm, self.landm) + self.assertDictOfArraysEqual(m.landm_xyz, self.landm_xyz) + + def test_load_ply(self): + m = Mesh(filename=self.test_ply_path, ppfilename=self.test_pp_path) + self.assertTrue((m.v == self.box_v).all()) + self.assertTrue((m.f == self.box_f).all()) + self.assertTrue(m.landm == self.landm) + + def test_ascii_bad_ply(self): + """Ensure that the proper exception is raised when a file fails to be read.""" + with self.assertRaisesRegex(SerializationError, 'Failed to open PLY file\.'): + Mesh(filename=self.test_bad_ply_path) + + # The next two tests are unnecessary, + # just demonstrating the exception hierarchy: + with self.assertRaises(MeshError): + Mesh(filename=self.test_bad_ply_path) + + with self.assertRaises(Exception): + Mesh(filename=self.test_bad_ply_path) + + def test_raw_initialization(self): + m = Mesh(v=self.box_v, f=self.box_f) + self.assertTrue((m.v == self.box_v).all()) + self.assertTrue((m.f == self.box_f).all()) + + def test_writing_ascii_ply(self): + m = Mesh(filename=self.test_ply_path) + (_, tempname) = tempfile.mkstemp() + m.write_ply(tempname, ascii=True) + with open(tempname, 'r') as f: + candidate = f.read() + os.remove(tempname) + with open(self.test_ply_path, 'r') as f: + truth = f.read() + self.assertEqual(candidate, truth) + + def test_writing_bin_ply(self): + m = Mesh(filename=self.test_ply_path) + (_, tempname) = tempfile.mkstemp() + m.write_ply(tempname) + with open(tempname, 'rb') as f: + candidate = f.read() + os.remove(tempname) + with open(self.test_bin_ply_path, 'rb') as f: + truth = f.read() + self.assertEqual(candidate, truth) + + def test_aabb_tree(self): + v_src = np.array([[-36, 37, 8], [5, -36, 35], [12, -15, 1], [-10, -42, -26], [-38, -32, -26], [-8, -45, 40], [44, -1, -1], [-16, 40, -13], + [-39, 28, -11], [-26, -10, -40], [-37, 44, 46], [8, -44, -27], [-15, 32, -48], [-46, -33, 15], [23, 15, -5], + [5, -20, 24], [-31, 19, -32], [-13, 13, 28], [-42, 43, 28], [-1, -6, -5]]) + f_src = np.array([[12, 16, 17], [5, 10, 1], [13, 19, 7], [13, 1, 5], [14, 8, 16], [9, 2, 8], [1, 19, 18], [4, 0, 3], [18, 15, 5], [3, 16, 2]]) + + m = Mesh(v=v_src, f=f_src) + t = m.compute_aabb_tree() + + v_query = np.array([[-19, 1, 1], 
[32, 29, 14], [-12, 31, 3], [-15, 44, 38], [5, 12, 9]]) + + v_expected = np.array([[-19.678178, 0.364208, -1.384218], [23.000000, 15.000000, -5.000000], [-13.729523, 19.930467, 0.278131], [-31.869765, 34.228123, 44.656367], [7.794764, 18.188195, -6.471474]]) + f_expected = np.array([2, 4, 0, 1, 4]) + + f_est, v_est = t.nearest(v_query) + + diff1 = abs(f_est - f_expected) + diff2 = abs(v_est - v_expected) + + self.assertTrue(max(diff1.flatten()) < 1e-6) + self.assertTrue(max(diff2.flatten()) < 1e-6) + + def test_estimate_vertex_normals(self): + # normals of a sphere should be scaled versions of the vertices + m = Mesh(filename=self.test_sphere_path) + m.v -= np.mean(m.v, axis=0) + rad = np.linalg.norm(m.v[0]) + vn = np.array(m.estimate_vertex_normals()) + mse = np.mean(np.sqrt(np.sum((vn - m.v / rad) ** 2, axis=1))) + self.assertTrue(mse < 0.05) + + @unittest.skipIf( + not os.path.isfile(os.path.join(test_data_folder, 'textured_mean_scape_female.obj')), + 'No data file.') + def test_landmark_loader(self): + scan_fname = pjoin(test_data_folder, 'csr0001a.ply') + scan_lmrk = pjoin(test_data_folder, 'csr0001a.lmrk') + template_fname = pjoin(test_data_folder, 'textured_mean_scape_female.obj') + template_pp = pjoin(test_data_folder, 'template_caesar_picked_points.pp') + scan = Mesh(filename=scan_fname, lmrkfilename=scan_lmrk) + template = Mesh(filename=template_fname, ppfilename=template_pp) + + # Detecting CAESAR lmrk files: + m = Mesh(filename=scan_fname, landmarks=scan_lmrk) + self.assertEqual(m.landm, scan.landm) + self.assertDictOfArraysEqual(m.landm_xyz, scan.landm_xyz) + + # Detecting Meshlab pp file + m = Mesh(filename=template_fname, landmarks=template_pp) + self.assertEqual(m.landm, template.landm) + self.assertDictOfArraysAlmostEqual(m.landm_xyz, template.landm_xyz) + + del template.landm_regressors + + def test(landmarks): + m = Mesh(filename=template_fname, landmarks=landmarks) + self.assertEqual(m.landm, template.landm) + self.assertDictOfArraysAlmostEqual(m.landm_xyz, template.landm_xyz) + + import json + import yaml + import pickle + tmp_dir = tempfile.mkdtemp('bodylabs-test') + test_files = [ + (yaml, os.path.join(tmp_dir, 'landmarks.yaml'), 'w'), + (yaml, os.path.join(tmp_dir, 'landmarks.yml'), 'w'), + (json, os.path.join(tmp_dir, 'landmarks.json'), 'w'), + (pickle, os.path.join(tmp_dir, 'landmarks.pkl'), 'wb'), + ] + test_data_ind = dict((n, int(v)) for n, v in template.landm.items()) + test_data_xyz = dict((n, v.tolist()) for n, v in template.landm_xyz.items()) + for loader, filename, mode in test_files: + with open(filename, mode) as fd: + loader.dump(test_data_ind, fd) + test(filename) + with open(filename, mode) as fd: + loader.dump(test_data_xyz, fd) + test(filename) + + shutil.rmtree(tmp_dir, ignore_errors=True) + + test(template.landm) + test(template.landm_xyz) + + m = Mesh(filename=template_fname, landmarks=[0, 1, 2]) + self.assertEqual(m.landm, {'0': 0, '1': 1, '2': 2}) + + m = Mesh(filename=template_fname, landmarks=[template.v[0], template.v[7]]) + self.assertDictOfArraysAlmostEqual(m.landm_xyz, {'0': template.v[0], '1': template.v[7]}) + + m = Mesh(filename=template_fname, landmarks=[template.v[0].tolist(), template.v[7].tolist()]) + self.assertDictOfArraysAlmostEqual(m.landm_xyz, {'0': template.v[0], '1': template.v[7]}) diff --git a/mesh-master/tests/test_meshviewer.py b/mesh-master/tests/test_meshviewer.py new file mode 100644 index 0000000000000000000000000000000000000000..0ac8c76f5b5c2348287c884528a55a1799af728c --- /dev/null +++ 
b/mesh-master/tests/test_meshviewer.py @@ -0,0 +1,79 @@ +import itertools +import multiprocessing +import os +import random +import socket +import time +import unittest + +from psbody.mesh.mesh import Mesh +from psbody.mesh.meshviewer import ( + MeshViewers, + MeshViewerRemote, + ZMQ_PORT_MIN, + ZMQ_PORT_MAX) + +from . import test_data_folder + + +class TestMeshViewer(unittest.TestCase): + """Check the MeshViewer class.""" + + def setUp(self): + + fnames = [os.path.join(test_data_folder, i) for i in os.listdir( + test_data_folder) if os.path.splitext(i)[1].lower() == '.ply'] + + # We build a cycle to make sure we have enough meshes + self.meshes = itertools.cycle(Mesh(filename=fname) for fname in fnames) + + self.mvs = MeshViewers(shape=[2, 2]) + self.mvs[0][0].set_static_meshes([next(self.meshes)]) + self.mvs[0][1].set_static_meshes([next(self.meshes)]) + self.mvs[1][0].set_static_meshes([next(self.meshes)]) + self.mvs[1][1].set_static_meshes([next(self.meshes)]) + + def test_launch_smoke_test(self): + """this test just opens a mesh window, waits, and kills the window""" + + print('keeping MeshViewer alive for 10 seconds..') + time.sleep(1) + print('killing MeshViewer and exiting...') + + def test_snapshot(self): + """test snapshots from mesh windows""" + + import tempfile + with tempfile.NamedTemporaryFile(suffix='.png', prefix='test_snapshot') as f: + self.mvs[0][0].save_snapshot(f.name) + self.assertTrue(os.path.isfile(f.name)) + + +class TestRemoteMeshViewer(unittest.TestCase): + def is_port_open(self, port): + sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + try: + sock.connect_ex(("0.0.0.0", port)) + sock.settimeout(0.1) + return True + except: + return False + finally: + sock.close() + + def pick_random_open_port(self): + while True: + port = random.randint(ZMQ_PORT_MIN, ZMQ_PORT_MAX) + if self.is_port_open(port): + return port + + def test_starting_a_remote_opens_a_port_for_listening(self): + """ + Start a MeshViewerRemote instance and verify that it's listening + for a given port. + """ + port = self.pick_random_open_port() + proc = multiprocessing.Process(target=MeshViewerRemote, kwargs={"port": port}) + self.assertTrue(self.is_port_open(port)) + if proc.is_alive(): + proc.terminate() diff --git a/mesh-master/tests/test_spheres.py b/mesh-master/tests/test_spheres.py new file mode 100644 index 0000000000000000000000000000000000000000..72d578b400e787317b644b2f68b3297a78a9bfb3 --- /dev/null +++ b/mesh-master/tests/test_spheres.py @@ -0,0 +1,15 @@ + +import unittest +import numpy as np + +from psbody.mesh.sphere import Sphere + + +class TestSphere(unittest.TestCase): + + def test_intersection_is_symmetric(self): + d = 2 + s0 = Sphere(np.array([0, 0, 0]), 1) + for dd in np.linspace(0, d, 10): + s1 = Sphere(np.array([d - dd, 0, 0]), 0.5) + self.assertAlmostEqual(s0.intersection_vol(s1), s1.intersection_vol(s0)) diff --git a/mesh-master/tests/test_topology.py b/mesh-master/tests/test_topology.py new file mode 100644 index 0000000000000000000000000000000000000000..603f44c4574a835c821920638acf515fd35e1989 --- /dev/null +++ b/mesh-master/tests/test_topology.py @@ -0,0 +1,90 @@ +#!/usr/bin/env python +# encoding: utf-8 + +# Copyright (c) 2013 Max Planck Society. All rights reserved. + +import numpy as np +import unittest +import os + +from . 
import test_data_folder, temporary_files_folder + + +class TestVisibility(unittest.TestCase): + + @unittest.skip('Too long - skipping for the moment.') + def test_qslim_smoke_test(self): + from psbody.mesh.mesh import Mesh + from psbody.mesh.topology.decimation import qslim_decimator + from psbody.mesh.geometry.triangle_area import triangle_area + + m = Mesh(filename=os.path.join(test_data_folder, 'female_template.ply')) + + ta = triangle_area(m.v, m.f) + m.set_face_colors(ta / np.max(ta)) + + qslimmer = qslim_decimator(m, factor=0.1) + m2 = qslimmer(m) + ta = triangle_area(m2.v, m2.f) + m2.set_face_colors(ta / np.max(ta)) + + +class TestLoopSubdivision(unittest.TestCase): + + @unittest.skipIf( + not os.path.isfile(os.path.join(test_data_folder, 'female_template.ply')), + 'No data file.') + def test_loop_subdivision_smoke_test(self): + from psbody.mesh import Mesh + from psbody.mesh.topology.subdivision import loop_subdivider + + m1 = Mesh(filename=os.path.join(test_data_folder, 'female_template.ply')) + sdv = loop_subdivider(m1) + + self.assertIsNotNone(sdv) + self.assertTrue(hasattr(sdv, "faces")) + + f_new = sdv.faces + + v_new = sdv(m1.v) + self.assertIsNotNone(v_new) + v_new = v_new.reshape((-1, 3)) + + v_new_want_edge = sdv(m1.v, want_edges=True) + self.assertIsNotNone(v_new_want_edge) + v_new_want_edge = v_new_want_edge.reshape((-1, 3)) + + m2 = Mesh(v=v_new, f=f_new) + + m1.reset_normals() + m2.reset_normals() + + m1.write_ply(os.path.join(temporary_files_folder, 'lowres.ply')) + m2.write_ply(os.path.join(temporary_files_folder, 'highres.ply')) + + if 0: + from psbody.mesh import MeshViewers + mvs = MeshViewers(shape=(2, 2)) + mvs[0][0].set_static_meshes([m1]) + m1.f = [] + mvs[0][1].set_static_meshes([m1]) + mvs[1][0].set_static_meshes([m2]) + m2.f = [] + mvs[1][1].set_static_meshes([m2]) + + +class TestConnectivity(unittest.TestCase): + + @unittest.skipIf( + not os.path.isfile(os.path.join(test_data_folder, 'female_template.ply')), + 'No data file.') + def test_connectivity_smoke_test(self): + + from psbody.mesh import Mesh + from psbody.mesh.topology.connectivity import get_vert_connectivity, get_faces_per_edge + m = Mesh(filename=os.path.join(test_data_folder, 'female_template.ply')) + vconn = get_vert_connectivity(m) + fpe = get_faces_per_edge(m) + + self.assertIsNotNone(vconn) + self.assertIsNotNone(fpe) diff --git a/mesh-master/tests/test_visibility.py b/mesh-master/tests/test_visibility.py new file mode 100644 index 0000000000000000000000000000000000000000..b80d6da9fc110182a7ca537f29645273b794f04a --- /dev/null +++ b/mesh-master/tests/test_visibility.py @@ -0,0 +1,53 @@ +#!/usr/bin/env python +# encoding: utf-8 + +# Copyright (c) 2013 MPI. All rights reserved. 
+ +import numpy as np +import unittest +from psbody.mesh.visibility import visibility_compute + + +class TestVisibility(unittest.TestCase): + + def test_box(self): + v = np.array([[0.50, 0.50, 0.50], + [-0.5, 0.50, 0.50], + [0.50, -0.5, 0.50], + [-0.5, -0.5, 0.50], + [0.50, 0.50, -0.5], + [-0.5, 0.50, -0.5], + [0.50, -0.5, -0.5], + [-0.5, -0.5, -0.5]]) + f = np.array([[1, 2, 3], [4, 3, 2], [1, 3, 5], [7, 5, 3], + [1, 5, 2], [6, 2, 5], [8, 6, 7], [5, 7, 6], + [8, 7, 4], [3, 4, 7], [8, 4, 6], [2, 6, 4]], dtype=np.uint32) - 1 + n = v / np.linalg.norm(v[0]) + + # test considering omnidirectional cameras + vis, n_dot_cam = visibility_compute(v=v, f=f, cams=np.array([[1.0, 0.0, 0.0]])) + self.assertTrue(((v.T[0] > 0) == vis).all()) + # test considering omnidirectional cameras and minimum dot product + # between camera-vertex ray and normal .5 + vis, n_dot_cam = visibility_compute(v=v, f=f, n=n, cams=np.array([[1e10, 0.0, 0.0]])) + vis = np.logical_and(vis, n_dot_cam > .5) + self.assertTrue(((v.T[0] > 0) == vis).all()) + # test considering two omnidirectional cameras + vis, n_dot_cam = visibility_compute(v=v, f=f, cams=np.array([[0.0, 1.0, 0.0], [0.0, 0.0, 1.0]])) + self.assertTrue(((v.T[1:3] > 0) == vis).all()) + + vextra = np.array([[.9, .9, .9], + [-.9, .9, .9], + [.9, -.9, .9], + [-.9, -.9, .9]], dtype=np.double) + fextra = np.array([[1, 2, 3], [4, 3, 2]], dtype=np.uint32) - 1 + # test considering extra meshes that can block light + cams = np.array([[0.0, 0.0, 10.0]]) + vis, n_dot_cam = visibility_compute(v=v, f=f, cams=cams, extra_v=vextra, extra_f=fextra) + self.assertTrue((np.zeros_like(v.T[0]) == vis).all()) + + # test considering extra meshes that can block light, but only if the + # if the distance is at least 1.0 + vis, n_dot_cam = visibility_compute(v=v, f=f, cams=np.array([[0.0, 0.0, 10.0]]), + extra_v=vextra, extra_f=fextra, min_dist=1.0) + self.assertTrue(((v.T[2] > 0) == vis).all()) diff --git a/mesh-master/tests/unittest_extensions.py b/mesh-master/tests/unittest_extensions.py new file mode 100644 index 0000000000000000000000000000000000000000..6cfd2ff464bb282dea5f4b73cef9a83f95b8c168 --- /dev/null +++ b/mesh-master/tests/unittest_extensions.py @@ -0,0 +1,38 @@ +import unittest +import numpy as np + + +### WTF again ??? 
+def sigfigs(n, sf): + return float(('%%.%dg' % sf) % n) + + +class ExtendedTest(unittest.TestCase): + def assertAlmostEqual(self, a, b, places=7, msg='', delta=None, sigfigs=None): + if sigfigs is None: + super(ExtendedTest, self).assertAlmostEqual(a, b, places, msg, delta) + else: + a_ = float(('%%.%dg' % sigfigs) % a) + b_ = float(('%%.%dg' % sigfigs) % b) + if a_ != b_: + raise AssertionError(msg or "%f != %f to %d significant figures (%f != %f)" % (a, b, sigfigs, a_, b_)) + + def assertDictOfArraysEqual(self, a, b, msg=''): + self.assertIsInstance(a, dict, msg or 'First argument is not a dictionary') + self.assertIsInstance(b, dict, msg or 'Second argument is not a dictionary') + self.assertSetEqual(set(a.keys()), set(b.keys()), msg or 'Keys do not match') + for k in a.keys(): + if isinstance(a[k], np.ndarray) and isinstance(b[k], np.ndarray): + np.testing.assert_array_equal(a[k], b[k], err_msg=msg + "\nwith key [%s]" % (k)) + else: + np.testing.assert_array_equal(np.array(a[k]), np.array(b[k]), err_msg=msg + "\nwith key [%s]" % (k)) + + def assertDictOfArraysAlmostEqual(self, a, b, decimal=6, msg=''): + self.assertIsInstance(a, dict, msg or 'First argument is not a dictionary') + self.assertIsInstance(b, dict, msg or 'Second argument is not a dictionary') + self.assertSetEqual(set(a.keys()), set(b.keys()), msg or 'Keys do not match') + for k in a.keys(): + if isinstance(a[k], np.ndarray) and isinstance(b[k], np.ndarray): + np.testing.assert_array_almost_equal(a[k], b[k], decimal=decimal, err_msg=msg + "\nwith key [%s]" % (k)) + else: + np.testing.assert_array_almost_equal(np.array(a[k]), np.array(b[k]), decimal=decimal, err_msg=msg + "\nwith key [%s]" % (k)) diff --git a/mesh-master/utils/git-hooks/pre-commit b/mesh-master/utils/git-hooks/pre-commit new file mode 100644 index 0000000000000000000000000000000000000000..1f24391b3510f5f5421e7bafcda1d4e584cf1419 --- /dev/null +++ b/mesh-master/utils/git-hooks/pre-commit @@ -0,0 +1,58 @@ +#!/usr/bin/env python + +from __future__ import print_function + +import argparse +import os +import subprocess +import sys + +STYLE_COMMAND = ['./utils/pycodestyle.py', + '--max-line-length=120'] + + +def check_author_details(): + """Check author name and email settings from + environment variables populated by git + """ + name = os.environ.get('GIT_AUTHOR_NAME', '') + email = os.environ.get('GIT_AUTHOR_EMAIL', '') + retval = 0 + if len(name) == 0: + print('Please set your name: git config --global user.name "Joe Scientist"') + retval = 1 + if len(email) == 0 or not email.endswith('.mpg.de'): + print('Please set your work email: git config --global user.email "joe.scientist@tuebingen.mpg.de"') + retval = 1 + return retval + + +def get_files(): + files = subprocess.check_output(['git', 'status', '--porcelain']).split('\n') + modified_files = [f[3:] for f in files if f and f[0] in 'AM' and f.endswith('.py')] + return modified_files + + +def check_python_style(files): + if files: + try: + subprocess.check_output(STYLE_COMMAND + files) + except subprocess.CalledProcessError as e: + print('Python style violations were found:') + print(e.output) + return e.returncode + return 0 + + +def main(): + print('Running pre-commit checks!') + files = get_files() + if check_author_details() or check_python_style(files): + print('Pre-commit check failed!
Please fix detected issues and re-try,\n' + 'or force the commit using "git commit --no-verify".') + sys.exit(1) + print('Pre-commit checks passed!') + + +if __name__ == "__main__": + main() diff --git a/mesh-master/utils/pycodestyle.py b/mesh-master/utils/pycodestyle.py new file mode 100644 index 0000000000000000000000000000000000000000..5b7a39c1570123770c9fd024510ddf4d24a79e7b --- /dev/null +++ b/mesh-master/utils/pycodestyle.py @@ -0,0 +1,2360 @@ +#!/usr/bin/env python +# pycodestyle.py - Check Python source code formatting, according to PEP 8 +# +# Copyright (C) 2006-2009 Johann C. Rocholl +# Copyright (C) 2009-2014 Florent Xicluna +# Copyright (C) 2014-2016 Ian Lee +# +# Permission is hereby granted, free of charge, to any person +# obtaining a copy of this software and associated documentation files +# (the "Software"), to deal in the Software without restriction, +# including without limitation the rights to use, copy, modify, merge, +# publish, distribute, sublicense, and/or sell copies of the Software, +# and to permit persons to whom the Software is furnished to do so, +# subject to the following conditions: +# +# The above copyright notice and this permission notice shall be +# included in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +r""" +Check Python source code formatting, according to PEP 8. + +For usage and a list of options, try this: +$ python pycodestyle.py -h + +This program and its regression test suite live here: +https://github.com/pycqa/pycodestyle + +Groups of errors and warnings: +E errors +W warnings +100 indentation +200 whitespace +300 blank lines +400 imports +500 line length +600 deprecation +700 statements +900 syntax error +""" +from __future__ import with_statement + +import inspect +import keyword +import os +import re +import sys +import time +import tokenize +import warnings +import bisect + +try: + from functools import lru_cache +except ImportError: + def lru_cache(maxsize=128): # noqa as it's a fake implementation. + """Does not really need a real a lru_cache, it's just optimization, so + let's just do nothing here. Python 3.2+ will just get better + performances, time to upgrade? 
+ """ + return lambda function: function + +from fnmatch import fnmatch +from optparse import OptionParser + +try: + from configparser import RawConfigParser + from io import TextIOWrapper +except ImportError: + from ConfigParser import RawConfigParser + +__version__ = '2.3.1' + +DEFAULT_EXCLUDE = '.svn,CVS,.bzr,.hg,.git,__pycache__,.tox' +DEFAULT_IGNORE = 'E121,E123,E126,E226,E24,E704,W503' +try: + if sys.platform == 'win32': + USER_CONFIG = os.path.expanduser(r'~\.pycodestyle') + else: + USER_CONFIG = os.path.join( + os.getenv('XDG_CONFIG_HOME') or os.path.expanduser('~/.config'), + 'pycodestyle' + ) +except ImportError: + USER_CONFIG = None + +PROJECT_CONFIG = ('setup.cfg', 'tox.ini') +TESTSUITE_PATH = os.path.join(os.path.dirname(__file__), 'testsuite') +MAX_LINE_LENGTH = 79 +REPORT_FORMAT = { + 'default': '%(path)s:%(row)d:%(col)d: %(code)s %(text)s', + 'pylint': '%(path)s:%(row)d: [%(code)s] %(text)s', +} + +PyCF_ONLY_AST = 1024 +SINGLETONS = frozenset(['False', 'None', 'True']) +KEYWORDS = frozenset(keyword.kwlist + ['print']) - SINGLETONS +UNARY_OPERATORS = frozenset(['>>', '**', '*', '+', '-']) +ARITHMETIC_OP = frozenset(['**', '*', '/', '//', '+', '-']) +WS_OPTIONAL_OPERATORS = ARITHMETIC_OP.union(['^', '&', '|', '<<', '>>', '%']) +WS_NEEDED_OPERATORS = frozenset([ + '**=', '*=', '/=', '//=', '+=', '-=', '!=', '<>', '<', '>', + '%=', '^=', '&=', '|=', '==', '<=', '>=', '<<=', '>>=', '=']) +WHITESPACE = frozenset(' \t') +NEWLINE = frozenset([tokenize.NL, tokenize.NEWLINE]) +SKIP_TOKENS = NEWLINE.union([tokenize.INDENT, tokenize.DEDENT]) +# ERRORTOKEN is triggered by backticks in Python 3 +SKIP_COMMENTS = SKIP_TOKENS.union([tokenize.COMMENT, tokenize.ERRORTOKEN]) +BENCHMARK_KEYS = ['directories', 'files', 'logical lines', 'physical lines'] + +INDENT_REGEX = re.compile(r'([ \t]*)') +RAISE_COMMA_REGEX = re.compile(r'raise\s+\w+\s*,') +RERAISE_COMMA_REGEX = re.compile(r'raise\s+\w+\s*,.*,\s*\w+\s*$') +ERRORCODE_REGEX = re.compile(r'\b[A-Z]\d{3}\b') +DOCSTRING_REGEX = re.compile(r'u?r?["\']') +EXTRANEOUS_WHITESPACE_REGEX = re.compile(r'[[({] | []}),;:]') +WHITESPACE_AFTER_COMMA_REGEX = re.compile(r'[,;:]\s*(?: |\t)') +COMPARE_SINGLETON_REGEX = re.compile(r'(\bNone|\bFalse|\bTrue)?\s*([=!]=)' + r'\s*(?(1)|(None|False|True))\b') +COMPARE_NEGATIVE_REGEX = re.compile(r'\b(not)\s+[^][)(}{ ]+\s+(in|is)\s') +COMPARE_TYPE_REGEX = re.compile(r'(?:[=!]=|is(?:\s+not)?)\s*type(?:s.\w+Type' + r'|\s*\(\s*([^)]*[^ )])\s*\))') +KEYWORD_REGEX = re.compile(r'(\s*)\b(?:%s)\b(\s*)' % r'|'.join(KEYWORDS)) +OPERATOR_REGEX = re.compile(r'(?:[^,\s])(\s*)(?:[-+*/|!<=>%&^]+)(\s*)') +LAMBDA_REGEX = re.compile(r'\blambda\b') +HUNK_REGEX = re.compile(r'^@@ -\d+(?:,\d+)? \+(\d+)(?:,(\d+))? @@.*$') +STARTSWITH_DEF_REGEX = re.compile(r'^(async\s+def|def)\b') +STARTSWITH_TOP_LEVEL_REGEX = re.compile(r'^(async\s+def\s+|def\s+|class\s+|@)') +STARTSWITH_INDENT_STATEMENT_REGEX = re.compile( + r'^\s*({0})\b'.format('|'.join(s.replace(' ', r'\s+') for s in ( + 'def', 'async def', + 'for', 'async for', + 'if', 'elif', 'else', + 'try', 'except', 'finally', + 'with', 'async with', + 'class', + 'while', + ))) +) +DUNDER_REGEX = re.compile(r'^__([^\s]+)__ = ') + +# Work around Python < 2.6 behaviour, which does not generate NL after +# a comment which is on a line by itself. 
+COMMENT_WITH_NL = tokenize.generate_tokens(['#\n'].pop).send(None)[1] == '#\n' + + +_checks = {'physical_line': {}, 'logical_line': {}, 'tree': {}} + + +def _get_parameters(function): + if sys.version_info >= (3, 3): + return [parameter.name + for parameter + in inspect.signature(function).parameters.values() + if parameter.kind == parameter.POSITIONAL_OR_KEYWORD] + else: + return inspect.getargspec(function)[0] + + +def register_check(check, codes=None): + """Register a new check object.""" + def _add_check(check, kind, codes, args): + if check in _checks[kind]: + _checks[kind][check][0].extend(codes or []) + else: + _checks[kind][check] = (codes or [''], args) + if inspect.isfunction(check): + args = _get_parameters(check) + if args and args[0] in ('physical_line', 'logical_line'): + if codes is None: + codes = ERRORCODE_REGEX.findall(check.__doc__ or '') + _add_check(check, args[0], codes, args) + elif inspect.isclass(check): + if _get_parameters(check.__init__)[:2] == ['self', 'tree']: + _add_check(check, 'tree', codes, None) + return check + + +############################################################################## +# Plugins (check functions) for physical lines +############################################################################## + +@register_check +def tabs_or_spaces(physical_line, indent_char): + r"""Never mix tabs and spaces. + + The most popular way of indenting Python is with spaces only. The + second-most popular way is with tabs only. Code indented with a mixture + of tabs and spaces should be converted to using spaces exclusively. When + invoking the Python command line interpreter with the -t option, it issues + warnings about code that illegally mixes tabs and spaces. When using -tt + these warnings become errors. These options are highly recommended! + + Okay: if a == 0:\n a = 1\n b = 1 + E101: if a == 0:\n a = 1\n\tb = 1 + """ + indent = INDENT_REGEX.match(physical_line).group(1) + for offset, char in enumerate(indent): + if char != indent_char: + return offset, "E101 indentation contains mixed spaces and tabs" + + +@register_check +def tabs_obsolete(physical_line): + r"""For new projects, spaces-only are strongly recommended over tabs. + + Okay: if True:\n return + W191: if True:\n\treturn + """ + indent = INDENT_REGEX.match(physical_line).group(1) + if '\t' in indent: + return indent.index('\t'), "W191 indentation contains tabs" + + +@register_check +def trailing_whitespace(physical_line): + r"""Trailing whitespace is superfluous. + + The warning returned varies on whether the line itself is blank, for easier + filtering for those who want to indent their blank lines. + + Okay: spam(1)\n# + W291: spam(1) \n# + W293: class Foo(object):\n \n bang = 12 + """ + physical_line = physical_line.rstrip('\n') # chr(10), newline + physical_line = physical_line.rstrip('\r') # chr(13), carriage return + physical_line = physical_line.rstrip('\x0c') # chr(12), form feed, ^L + stripped = physical_line.rstrip(' \t\v') + if physical_line != stripped: + if stripped: + return len(stripped), "W291 trailing whitespace" + else: + return 0, "W293 blank line contains whitespace" + + +@register_check +def trailing_blank_lines(physical_line, lines, line_number, total_lines): + r"""Trailing blank lines are superfluous. + + Okay: spam(1) + W391: spam(1)\n + + However the last line should end with a new line (warning W292). 
+ """ + if line_number == total_lines: + stripped_last_line = physical_line.rstrip() + if not stripped_last_line: + return 0, "W391 blank line at end of file" + if stripped_last_line == physical_line: + return len(physical_line), "W292 no newline at end of file" + + +@register_check +def maximum_line_length(physical_line, max_line_length, multiline, noqa): + r"""Limit all lines to a maximum of 79 characters. + + There are still many devices around that are limited to 80 character + lines; plus, limiting windows to 80 characters makes it possible to have + several windows side-by-side. The default wrapping on such devices looks + ugly. Therefore, please limit all lines to a maximum of 79 characters. + For flowing long blocks of text (docstrings or comments), limiting the + length to 72 characters is recommended. + + Reports error E501. + """ + line = physical_line.rstrip() + length = len(line) + if length > max_line_length and not noqa: + # Special case for long URLs in multi-line docstrings or comments, + # but still report the error when the 72 first chars are whitespaces. + chunks = line.split() + if ((len(chunks) == 1 and multiline) or + (len(chunks) == 2 and chunks[0] == '#')) and \ + len(line) - len(chunks[-1]) < max_line_length - 7: + return + if hasattr(line, 'decode'): # Python 2 + # The line could contain multi-byte characters + try: + length = len(line.decode('utf-8')) + except UnicodeError: + pass + if length > max_line_length: + return (max_line_length, "E501 line too long " + "(%d > %d characters)" % (length, max_line_length)) + + +############################################################################## +# Plugins (check functions) for logical lines +############################################################################## + + +@register_check +def blank_lines(logical_line, blank_lines, indent_level, line_number, + blank_before, previous_logical, + previous_unindented_logical_line, previous_indent_level, + lines): + r"""Separate top-level function and class definitions with two blank lines. + + Method definitions inside a class are separated by a single blank line. + + Extra blank lines may be used (sparingly) to separate groups of related + functions. Blank lines may be omitted between a bunch of related + one-liners (e.g. a set of dummy implementations). + + Use blank lines in functions, sparingly, to indicate logical sections. 
+ + Okay: def a():\n pass\n\n\ndef b():\n pass + Okay: def a():\n pass\n\n\nasync def b():\n pass + Okay: def a():\n pass\n\n\n# Foo\n# Bar\n\ndef b():\n pass + Okay: default = 1\nfoo = 1 + Okay: classify = 1\nfoo = 1 + + E301: class Foo:\n b = 0\n def bar():\n pass + E302: def a():\n pass\n\ndef b(n):\n pass + E302: def a():\n pass\n\nasync def b(n):\n pass + E303: def a():\n pass\n\n\n\ndef b(n):\n pass + E303: def a():\n\n\n\n pass + E304: @decorator\n\ndef a():\n pass + E305: def a():\n pass\na() + E306: def a():\n def b():\n pass\n def c():\n pass + """ + if line_number < 3 and not previous_logical: + return # Don't expect blank lines before the first line + if previous_logical.startswith('@'): + if blank_lines: + yield 0, "E304 blank lines found after function decorator" + elif blank_lines > 2 or (indent_level and blank_lines == 2): + yield 0, "E303 too many blank lines (%d)" % blank_lines + elif STARTSWITH_TOP_LEVEL_REGEX.match(logical_line): + if indent_level: + if not (blank_before or previous_indent_level < indent_level or + DOCSTRING_REGEX.match(previous_logical)): + ancestor_level = indent_level + nested = False + # Search backwards for a def ancestor or tree root (top level). + for line in lines[line_number - 2::-1]: + if line.strip() and expand_indent(line) < ancestor_level: + ancestor_level = expand_indent(line) + nested = line.lstrip().startswith('def ') + if nested or ancestor_level == 0: + break + if nested: + yield 0, "E306 expected 1 blank line before a " \ + "nested definition, found 0" + else: + yield 0, "E301 expected 1 blank line, found 0" + elif blank_before != 2: + yield 0, "E302 expected 2 blank lines, found %d" % blank_before + elif (logical_line and not indent_level and blank_before != 2 and + previous_unindented_logical_line.startswith(('def ', 'class '))): + yield 0, "E305 expected 2 blank lines after " \ + "class or function definition, found %d" % blank_before + + +@register_check +def extraneous_whitespace(logical_line): + r"""Avoid extraneous whitespace. + + Avoid extraneous whitespace in these situations: + - Immediately inside parentheses, brackets or braces. + - Immediately before a comma, semicolon, or colon. + + Okay: spam(ham[1], {eggs: 2}) + E201: spam( ham[1], {eggs: 2}) + E201: spam(ham[ 1], {eggs: 2}) + E201: spam(ham[1], { eggs: 2}) + E202: spam(ham[1], {eggs: 2} ) + E202: spam(ham[1 ], {eggs: 2}) + E202: spam(ham[1], {eggs: 2 }) + + E203: if x == 4: print x, y; x, y = y , x + E203: if x == 4: print x, y ; x, y = y, x + E203: if x == 4 : print x, y; x, y = y, x + """ + line = logical_line + for match in EXTRANEOUS_WHITESPACE_REGEX.finditer(line): + text = match.group() + char = text.strip() + found = match.start() + if text == char + ' ': + # assert char in '([{' + yield found + 1, "E201 whitespace after '%s'" % char + elif line[found - 1] != ',': + code = ('E202' if char in '}])' else 'E203') # if char in ',;:' + yield found, "%s whitespace before '%s'" % (code, char) + + +@register_check +def whitespace_around_keywords(logical_line): + r"""Avoid extraneous whitespace around keywords. 
+ + Okay: True and False + E271: True and False + E272: True and False + E273: True and\tFalse + E274: True\tand False + """ + for match in KEYWORD_REGEX.finditer(logical_line): + before, after = match.groups() + + if '\t' in before: + yield match.start(1), "E274 tab before keyword" + elif len(before) > 1: + yield match.start(1), "E272 multiple spaces before keyword" + + if '\t' in after: + yield match.start(2), "E273 tab after keyword" + elif len(after) > 1: + yield match.start(2), "E271 multiple spaces after keyword" + + +@register_check +def missing_whitespace_after_import_keyword(logical_line): + r"""Multiple imports in form from x import (a, b, c) should have space + between import statement and parenthesised name list. + + Okay: from foo import (bar, baz) + E275: from foo import(bar, baz) + E275: from importable.module import(bar, baz) + """ + line = logical_line + indicator = ' import(' + if line.startswith('from '): + found = line.find(indicator) + if -1 < found: + pos = found + len(indicator) - 1 + yield pos, "E275 missing whitespace after keyword" + + +@register_check +def missing_whitespace(logical_line): + r"""Each comma, semicolon or colon should be followed by whitespace. + + Okay: [a, b] + Okay: (3,) + Okay: a[1:4] + Okay: a[:4] + Okay: a[1:] + Okay: a[1:4:2] + E231: ['a','b'] + E231: foo(bar,baz) + E231: [{'a':'b'}] + """ + line = logical_line + for index in range(len(line) - 1): + char = line[index] + if char in ',;:' and line[index + 1] not in WHITESPACE: + before = line[:index] + if char == ':' and before.count('[') > before.count(']') and \ + before.rfind('{') < before.rfind('['): + continue # Slice syntax, no space required + if char == ',' and line[index + 1] == ')': + continue # Allow tuple with only one element: (3,) + yield index, "E231 missing whitespace after '%s'" % char + + +@register_check +def indentation(logical_line, previous_logical, indent_char, + indent_level, previous_indent_level): + r"""Use 4 spaces per indentation level. + + For really old code that you don't want to mess up, you can continue to + use 8-space tabs. + + Okay: a = 1 + Okay: if a == 0:\n a = 1 + E111: a = 1 + E114: # a = 1 + + Okay: for item in items:\n pass + E112: for item in items:\npass + E115: for item in items:\n# Hi\n pass + + Okay: a = 1\nb = 2 + E113: a = 1\n b = 2 + E116: a = 1\n # b = 2 + """ + c = 0 if logical_line else 3 + tmpl = "E11%d %s" if logical_line else "E11%d %s (comment)" + if indent_level % 4: + yield 0, tmpl % (1 + c, "indentation is not a multiple of four") + indent_expect = previous_logical.endswith(':') + if indent_expect and indent_level <= previous_indent_level: + yield 0, tmpl % (2 + c, "expected an indented block") + elif not indent_expect and indent_level > previous_indent_level: + yield 0, tmpl % (3 + c, "unexpected indentation") + + +@register_check +def continued_indentation(logical_line, tokens, indent_level, hang_closing, + indent_char, noqa, verbose): + r"""Continuation lines indentation. + + Continuation lines should align wrapped elements either vertically + using Python's implicit line joining inside parentheses, brackets + and braces, or using a hanging indent. + + When using a hanging indent these considerations should be applied: + - there should be no arguments on the first line, and + - further indentation should be used to clearly distinguish itself as a + continuation line. 
+ + Okay: a = (\n) + E123: a = (\n ) + + Okay: a = (\n 42) + E121: a = (\n 42) + E122: a = (\n42) + E123: a = (\n 42\n ) + E124: a = (24,\n 42\n) + E125: if (\n b):\n pass + E126: a = (\n 42) + E127: a = (24,\n 42) + E128: a = (24,\n 42) + E129: if (a or\n b):\n pass + E131: a = (\n 42\n 24) + """ + first_row = tokens[0][2][0] + nrows = 1 + tokens[-1][2][0] - first_row + if noqa or nrows == 1: + return + + # indent_next tells us whether the next block is indented; assuming + # that it is indented by 4 spaces, then we should not allow 4-space + # indents on the final continuation line; in turn, some other + # indents are allowed to have an extra 4 spaces. + indent_next = logical_line.endswith(':') + + row = depth = 0 + valid_hangs = (4,) if indent_char != '\t' else (4, 8) + # remember how many brackets were opened on each line + parens = [0] * nrows + # relative indents of physical lines + rel_indent = [0] * nrows + # for each depth, collect a list of opening rows + open_rows = [[0]] + # for each depth, memorize the hanging indentation + hangs = [None] + # visual indents + indent_chances = {} + last_indent = tokens[0][2] + visual_indent = None + last_token_multiline = False + # for each depth, memorize the visual indent column + indent = [last_indent[1]] + if verbose >= 3: + print(">>> " + tokens[0][4].rstrip()) + + for token_type, text, start, end, line in tokens: + + newline = row < start[0] - first_row + if newline: + row = start[0] - first_row + newline = not last_token_multiline and token_type not in NEWLINE + + if newline: + # this is the beginning of a continuation line. + last_indent = start + if verbose >= 3: + print("... " + line.rstrip()) + + # record the initial indent. + rel_indent[row] = expand_indent(line) - indent_level + + # identify closing bracket + close_bracket = (token_type == tokenize.OP and text in ']})') + + # is the indent relative to an opening bracket line? + for open_row in reversed(open_rows[depth]): + hang = rel_indent[row] - rel_indent[open_row] + hanging_indent = hang in valid_hangs + if hanging_indent: + break + if hangs[depth]: + hanging_indent = (hang == hangs[depth]) + # is there any chance of visual indent? 
+ visual_indent = (not close_bracket and hang > 0 and + indent_chances.get(start[1])) + + if close_bracket and indent[depth]: + # closing bracket for visual indent + if start[1] != indent[depth]: + yield (start, "E124 closing bracket does not match " + "visual indentation") + elif close_bracket and not hang: + # closing bracket matches indentation of opening bracket's line + if hang_closing: + yield start, "E133 closing bracket is missing indentation" + elif indent[depth] and start[1] < indent[depth]: + if visual_indent is not True: + # visual indent is broken + yield (start, "E128 continuation line " + "under-indented for visual indent") + elif hanging_indent or (indent_next and rel_indent[row] == 8): + # hanging indent is verified + if close_bracket and not hang_closing: + yield (start, "E123 closing bracket does not match " + "indentation of opening bracket's line") + hangs[depth] = hang + elif visual_indent is True: + # visual indent is verified + indent[depth] = start[1] + elif visual_indent in (text, str): + # ignore token lined up with matching one from a previous line + pass + else: + # indent is broken + if hang <= 0: + error = "E122", "missing indentation or outdented" + elif indent[depth]: + error = "E127", "over-indented for visual indent" + elif not close_bracket and hangs[depth]: + error = "E131", "unaligned for hanging indent" + else: + hangs[depth] = hang + if hang > 4: + error = "E126", "over-indented for hanging indent" + else: + error = "E121", "under-indented for hanging indent" + yield start, "%s continuation line %s" % error + + # look for visual indenting + if (parens[row] and + token_type not in (tokenize.NL, tokenize.COMMENT) and + not indent[depth]): + indent[depth] = start[1] + indent_chances[start[1]] = True + if verbose >= 4: + print("bracket depth %s indent to %s" % (depth, start[1])) + # deal with implicit string concatenation + elif (token_type in (tokenize.STRING, tokenize.COMMENT) or + text in ('u', 'ur', 'b', 'br')): + indent_chances[start[1]] = str + # special case for the "if" statement because len("if (") == 4 + elif not indent_chances and not row and not depth and text == 'if': + indent_chances[end[1] + 1] = True + elif text == ':' and line[end[1]:].isspace(): + open_rows[depth].append(row) + + # keep track of bracket depth + if token_type == tokenize.OP: + if text in '([{': + depth += 1 + indent.append(0) + hangs.append(None) + if len(open_rows) == depth: + open_rows.append([]) + open_rows[depth].append(row) + parens[row] += 1 + if verbose >= 4: + print("bracket depth %s seen, col %s, visual min = %s" % + (depth, start[1], indent[depth])) + elif text in ')]}' and depth > 0: + # parent indents should not be more than this one + prev_indent = indent.pop() or last_indent[1] + hangs.pop() + for d in range(depth): + if indent[d] > prev_indent: + indent[d] = 0 + for ind in list(indent_chances): + if ind >= prev_indent: + del indent_chances[ind] + del open_rows[depth + 1:] + depth -= 1 + if depth: + indent_chances[indent[depth]] = True + for idx in range(row, -1, -1): + if parens[idx]: + parens[idx] -= 1 + break + assert len(indent) == depth + 1 + if start[1] not in indent_chances: + # allow lining up tokens + indent_chances[start[1]] = text + + last_token_multiline = (start[0] != end[0]) + if last_token_multiline: + rel_indent[end[0] - first_row] = rel_indent[row] + + if indent_next and expand_indent(line) == indent_level + 4: + pos = (start[0], indent[0] + 4) + if visual_indent: + code = "E129 visually indented line" + else: + code = "E125 continuation 
line" + yield pos, "%s with same indent as next logical line" % code + + +@register_check +def whitespace_before_parameters(logical_line, tokens): + r"""Avoid extraneous whitespace. + + Avoid extraneous whitespace in the following situations: + - before the open parenthesis that starts the argument list of a + function call. + - before the open parenthesis that starts an indexing or slicing. + + Okay: spam(1) + E211: spam (1) + + Okay: dict['key'] = list[index] + E211: dict ['key'] = list[index] + E211: dict['key'] = list [index] + """ + prev_type, prev_text, __, prev_end, __ = tokens[0] + for index in range(1, len(tokens)): + token_type, text, start, end, __ = tokens[index] + if (token_type == tokenize.OP and + text in '([' and + start != prev_end and + (prev_type == tokenize.NAME or prev_text in '}])') and + # Syntax "class A (B):" is allowed, but avoid it + (index < 2 or tokens[index - 2][1] != 'class') and + # Allow "return (a.foo for a in range(5))" + not keyword.iskeyword(prev_text)): + yield prev_end, "E211 whitespace before '%s'" % text + prev_type = token_type + prev_text = text + prev_end = end + + +@register_check +def whitespace_around_operator(logical_line): + r"""Avoid extraneous whitespace around an operator. + + Okay: a = 12 + 3 + E221: a = 4 + 5 + E222: a = 4 + 5 + E223: a = 4\t+ 5 + E224: a = 4 +\t5 + """ + for match in OPERATOR_REGEX.finditer(logical_line): + before, after = match.groups() + + if '\t' in before: + yield match.start(1), "E223 tab before operator" + elif len(before) > 1: + yield match.start(1), "E221 multiple spaces before operator" + + if '\t' in after: + yield match.start(2), "E224 tab after operator" + elif len(after) > 1: + yield match.start(2), "E222 multiple spaces after operator" + + +@register_check +def missing_whitespace_around_operator(logical_line, tokens): + r"""Surround operators with a single space on either side. + + - Always surround these binary operators with a single space on + either side: assignment (=), augmented assignment (+=, -= etc.), + comparisons (==, <, >, !=, <=, >=, in, not in, is, is not), + Booleans (and, or, not). + + - If operators with different priorities are used, consider adding + whitespace around the operators with the lowest priorities. 
+ + Okay: i = i + 1 + Okay: submitted += 1 + Okay: x = x * 2 - 1 + Okay: hypot2 = x * x + y * y + Okay: c = (a + b) * (a - b) + Okay: foo(bar, key='word', *args, **kwargs) + Okay: alpha[:-i] + + E225: i=i+1 + E225: submitted +=1 + E225: x = x /2 - 1 + E225: z = x **y + E226: c = (a+b) * (a-b) + E226: hypot2 = x*x + y*y + E227: c = a|b + E228: msg = fmt%(errno, errmsg) + """ + parens = 0 + need_space = False + prev_type = tokenize.OP + prev_text = prev_end = None + for token_type, text, start, end, line in tokens: + if token_type in SKIP_COMMENTS: + continue + if text in ('(', 'lambda'): + parens += 1 + elif text == ')': + parens -= 1 + if need_space: + if start != prev_end: + # Found a (probably) needed space + if need_space is not True and not need_space[1]: + yield (need_space[0], + "E225 missing whitespace around operator") + need_space = False + elif text == '>' and prev_text in ('<', '-'): + # Tolerate the "<>" operator, even if running Python 3 + # Deal with Python 3's annotated return value "->" + pass + else: + if need_space is True or need_space[1]: + # A needed trailing space was not found + yield prev_end, "E225 missing whitespace around operator" + elif prev_text != '**': + code, optype = 'E226', 'arithmetic' + if prev_text == '%': + code, optype = 'E228', 'modulo' + elif prev_text not in ARITHMETIC_OP: + code, optype = 'E227', 'bitwise or shift' + yield (need_space[0], "%s missing whitespace " + "around %s operator" % (code, optype)) + need_space = False + elif token_type == tokenize.OP and prev_end is not None: + if text == '=' and parens: + # Allow keyword args or defaults: foo(bar=None). + pass + elif text in WS_NEEDED_OPERATORS: + need_space = True + elif text in UNARY_OPERATORS: + # Check if the operator is being used as a binary operator + # Allow unary operators: -123, -x, +1. + # Allow argument unpacking: foo(*args, **kwargs). + if (prev_text in '}])' if prev_type == tokenize.OP + else prev_text not in KEYWORDS): + need_space = None + elif text in WS_OPTIONAL_OPERATORS: + need_space = None + + if need_space is None: + # Surrounding space is optional, but ensure that + # trailing space matches opening space + need_space = (prev_end, start != prev_end) + elif need_space and start == prev_end: + # A needed opening space was not found + yield prev_end, "E225 missing whitespace around operator" + need_space = False + prev_type = token_type + prev_text = text + prev_end = end + + +@register_check +def whitespace_around_comma(logical_line): + r"""Avoid extraneous whitespace after a comma or a colon. + + Note: these checks are disabled by default + + Okay: a = (1, 2) + E241: a = (1, 2) + E242: a = (1,\t2) + """ + line = logical_line + for m in WHITESPACE_AFTER_COMMA_REGEX.finditer(line): + found = m.start() + 1 + if '\t' in m.group(): + yield found, "E242 tab after '%s'" % m.group()[0] + else: + yield found, "E241 multiple spaces after '%s'" % m.group()[0] + + +@register_check +def whitespace_around_named_parameter_equals(logical_line, tokens): + r"""Don't use spaces around the '=' sign in function arguments. + + Don't use spaces around the '=' sign when used to indicate a + keyword argument or a default parameter value. 
+ + Okay: def complex(real, imag=0.0): + Okay: return magic(r=real, i=imag) + Okay: boolean(a == b) + Okay: boolean(a != b) + Okay: boolean(a <= b) + Okay: boolean(a >= b) + Okay: def foo(arg: int = 42): + Okay: async def foo(arg: int = 42): + + E251: def complex(real, imag = 0.0): + E251: return magic(r = real, i = imag) + """ + parens = 0 + no_space = False + prev_end = None + annotated_func_arg = False + in_def = bool(STARTSWITH_DEF_REGEX.match(logical_line)) + message = "E251 unexpected spaces around keyword / parameter equals" + for token_type, text, start, end, line in tokens: + if token_type == tokenize.NL: + continue + if no_space: + no_space = False + if start != prev_end: + yield (prev_end, message) + if token_type == tokenize.OP: + if text in '([': + parens += 1 + elif text in ')]': + parens -= 1 + elif in_def and text == ':' and parens == 1: + annotated_func_arg = True + elif parens and text == ',' and parens == 1: + annotated_func_arg = False + elif parens and text == '=' and not annotated_func_arg: + no_space = True + if start != prev_end: + yield (prev_end, message) + if not parens: + annotated_func_arg = False + + prev_end = end + + +@register_check +def whitespace_before_comment(logical_line, tokens): + r"""Separate inline comments by at least two spaces. + + An inline comment is a comment on the same line as a statement. Inline + comments should be separated by at least two spaces from the statement. + They should start with a # and a single space. + + Each line of a block comment starts with a # and a single space + (unless it is indented text inside the comment). + + Okay: x = x + 1 # Increment x + Okay: x = x + 1 # Increment x + Okay: # Block comment + E261: x = x + 1 # Increment x + E262: x = x + 1 #Increment x + E262: x = x + 1 # Increment x + E265: #Block comment + E266: ### Block comment + """ + prev_end = (0, 0) + for token_type, text, start, end, line in tokens: + if token_type == tokenize.COMMENT: + inline_comment = line[:start[1]].strip() + if inline_comment: + if prev_end[0] == start[0] and start[1] < prev_end[1] + 2: + yield (prev_end, + "E261 at least two spaces before inline comment") + symbol, sp, comment = text.partition(' ') + bad_prefix = symbol not in '#:' and (symbol.lstrip('#')[:1] or '#') + if inline_comment: + if bad_prefix or comment[:1] in WHITESPACE: + yield start, "E262 inline comment should start with '# '" + elif bad_prefix and (bad_prefix != '!' or start[0] > 1): + if bad_prefix != '#': + yield start, "E265 block comment should start with '# '" + elif comment: + yield start, "E266 too many leading '#' for block comment" + elif token_type != tokenize.NL: + prev_end = end + + +@register_check +def imports_on_separate_lines(logical_line): + r"""Place imports on separate lines. + + Okay: import os\nimport sys + E401: import sys, os + + Okay: from subprocess import Popen, PIPE + Okay: from myclas import MyClass + Okay: from foo.bar.yourclass import YourClass + Okay: import myclass + Okay: import foo.bar.yourclass + """ + line = logical_line + if line.startswith('import '): + found = line.find(',') + if -1 < found and ';' not in line[:found]: + yield found, "E401 multiple imports on one line" + + +@register_check +def module_imports_on_top_of_file( + logical_line, indent_level, checker_state, noqa): + r"""Place imports at the top of the file. + + Always put imports at the top of the file, just after any module comments + and docstrings, and before module globals and constants. 
+ + Okay: import os + Okay: # this is a comment\nimport os + Okay: '''this is a module docstring'''\nimport os + Okay: r'''this is a module docstring'''\nimport os + Okay: + try:\n\timport x\nexcept ImportError:\n\tpass\nelse:\n\tpass\nimport y + Okay: + try:\n\timport x\nexcept ImportError:\n\tpass\nfinally:\n\tpass\nimport y + E402: a=1\nimport os + E402: 'One string'\n"Two string"\nimport os + E402: a=1\nfrom sys import x + + Okay: if x:\n import os + """ + def is_string_literal(line): + if line[0] in 'uUbB': + line = line[1:] + if line and line[0] in 'rR': + line = line[1:] + return line and (line[0] == '"' or line[0] == "'") + + allowed_try_keywords = ('try', 'except', 'else', 'finally') + + if indent_level: # Allow imports in conditional statements or functions + return + if not logical_line: # Allow empty lines or comments + return + if noqa: + return + line = logical_line + if line.startswith('import ') or line.startswith('from '): + if checker_state.get('seen_non_imports', False): + yield 0, "E402 module level import not at top of file" + elif re.match(DUNDER_REGEX, line): + return + elif any(line.startswith(kw) for kw in allowed_try_keywords): + # Allow try, except, else, finally keywords intermixed with imports in + # order to support conditional importing + return + elif is_string_literal(line): + # The first literal is a docstring, allow it. Otherwise, report error. + if checker_state.get('seen_docstring', False): + checker_state['seen_non_imports'] = True + else: + checker_state['seen_docstring'] = True + else: + checker_state['seen_non_imports'] = True + + +@register_check +def compound_statements(logical_line): + r"""Compound statements (on the same line) are generally discouraged. + + While sometimes it's okay to put an if/for/while with a small body + on the same line, never do this for multi-clause statements. + Also avoid folding such long lines! + + Always use a def statement instead of an assignment statement that + binds a lambda expression directly to a name. 
+ + Okay: if foo == 'blah':\n do_blah_thing() + Okay: do_one() + Okay: do_two() + Okay: do_three() + + E701: if foo == 'blah': do_blah_thing() + E701: for x in lst: total += x + E701: while t < 10: t = delay() + E701: if foo == 'blah': do_blah_thing() + E701: else: do_non_blah_thing() + E701: try: something() + E701: finally: cleanup() + E701: if foo == 'blah': one(); two(); three() + E702: do_one(); do_two(); do_three() + E703: do_four(); # useless semicolon + E704: def f(x): return 2*x + E731: f = lambda x: 2*x + """ + line = logical_line + last_char = len(line) - 1 + found = line.find(':') + prev_found = 0 + counts = dict((char, 0) for char in '{}[]()') + while -1 < found < last_char: + update_counts(line[prev_found:found], counts) + if ((counts['{'] <= counts['}'] and # {'a': 1} (dict) + counts['['] <= counts[']'] and # [1:2] (slice) + counts['('] <= counts[')'])): # (annotation) + lambda_kw = LAMBDA_REGEX.search(line, 0, found) + if lambda_kw: + before = line[:lambda_kw.start()].rstrip() + if before[-1:] == '=' and isidentifier(before[:-1].strip()): + yield 0, ("E731 do not assign a lambda expression, use a " + "def") + break + if STARTSWITH_DEF_REGEX.match(line): + yield 0, "E704 multiple statements on one line (def)" + elif STARTSWITH_INDENT_STATEMENT_REGEX.match(line): + yield found, "E701 multiple statements on one line (colon)" + prev_found = found + found = line.find(':', found + 1) + found = line.find(';') + while -1 < found: + if found < last_char: + yield found, "E702 multiple statements on one line (semicolon)" + else: + yield found, "E703 statement ends with a semicolon" + found = line.find(';', found + 1) + + +@register_check +def explicit_line_join(logical_line, tokens): + r"""Avoid explicit line join between brackets. + + The preferred way of wrapping long lines is by using Python's implied line + continuation inside parentheses, brackets and braces. Long lines can be + broken over multiple lines by wrapping expressions in parentheses. These + should be used in preference to using a backslash for line continuation. + + E502: aaa = [123, \\n 123] + E502: aaa = ("bbb " \\n "ccc") + + Okay: aaa = [123,\n 123] + Okay: aaa = ("bbb "\n "ccc") + Okay: aaa = "bbb " \\n "ccc" + Okay: aaa = 123 # \\ + """ + prev_start = prev_end = parens = 0 + comment = False + backslash = None + for token_type, text, start, end, line in tokens: + if token_type == tokenize.COMMENT: + comment = True + if start[0] != prev_start and parens and backslash and not comment: + yield backslash, "E502 the backslash is redundant between brackets" + if end[0] != prev_end: + if line.rstrip('\r\n').endswith('\\'): + backslash = (end[0], len(line.splitlines()[-1]) - 1) + else: + backslash = None + prev_start = prev_end = end[0] + else: + prev_start = start[0] + if token_type == tokenize.OP: + if text in '([{': + parens += 1 + elif text in ')]}': + parens -= 1 + + +@register_check +def break_around_binary_operator(logical_line, tokens): + r""" + Avoid breaks before binary operators. + + The preferred place to break around a binary operator is after the + operator, not before it. 
+ + W503: (width == 0\n + height == 0) + W503: (width == 0\n and height == 0) + + Okay: (width == 0 +\n height == 0) + Okay: foo(\n -x) + Okay: foo(x\n []) + Okay: x = '''\n''' + '' + Okay: foo(x,\n -y) + Okay: foo(x, # comment\n -y) + Okay: var = (1 &\n ~2) + Okay: var = (1 /\n -2) + Okay: var = (1 +\n -1 +\n -2) + """ + def is_binary_operator(token_type, text): + # The % character is strictly speaking a binary operator, but the + # common usage seems to be to put it next to the format parameters, + # after a line break. + return ((token_type == tokenize.OP or text in ['and', 'or']) and + text not in "()[]{},:.;@=%~") + + line_break = False + unary_context = True + # Previous non-newline token types and text + previous_token_type = None + previous_text = None + for token_type, text, start, end, line in tokens: + if token_type == tokenize.COMMENT: + continue + if ('\n' in text or '\r' in text) and token_type != tokenize.STRING: + line_break = True + else: + if (is_binary_operator(token_type, text) and line_break and + not unary_context and + not is_binary_operator(previous_token_type, + previous_text)): + yield start, "W503 line break before binary operator" + unary_context = text in '([{,;' + line_break = False + previous_token_type = token_type + previous_text = text + + +@register_check +def comparison_to_singleton(logical_line, noqa): + r"""Comparison to singletons should use "is" or "is not". + + Comparisons to singletons like None should always be done + with "is" or "is not", never the equality operators. + + Okay: if arg is not None: + E711: if arg != None: + E711: if None == arg: + E712: if arg == True: + E712: if False == arg: + + Also, beware of writing if x when you really mean if x is not None -- + e.g. when testing whether a variable or argument that defaults to None was + set to some other value. The other value might have a type (such as a + container) that could be false in a boolean context! + """ + match = not noqa and COMPARE_SINGLETON_REGEX.search(logical_line) + if match: + singleton = match.group(1) or match.group(3) + same = (match.group(2) == '==') + + msg = "'if cond is %s:'" % (('' if same else 'not ') + singleton) + if singleton in ('None',): + code = 'E711' + else: + code = 'E712' + nonzero = ((singleton == 'True' and same) or + (singleton == 'False' and not same)) + msg += " or 'if %scond:'" % ('' if nonzero else 'not ') + yield match.start(2), ("%s comparison to %s should be %s" % + (code, singleton, msg)) + + +@register_check +def comparison_negative(logical_line): + r"""Negative comparison should be done using "not in" and "is not". + + Okay: if x not in y:\n pass + Okay: assert (X in Y or X is Z) + Okay: if not (X in Y):\n pass + Okay: zz = x is not y + E713: Z = not X in Y + E713: if not X.B in Y:\n pass + E714: if not X is Y:\n pass + E714: Z = not X.B is Y + """ + match = COMPARE_NEGATIVE_REGEX.search(logical_line) + if match: + pos = match.start(1) + if match.group(2) == 'in': + yield pos, "E713 test for membership should be 'not in'" + else: + yield pos, "E714 test for object identity should be 'is not'" + + +@register_check +def comparison_type(logical_line, noqa): + r"""Object type comparisons should always use isinstance(). + + Do not compare types directly. + + Okay: if isinstance(obj, int): + E721: if type(obj) is type(1): + + When checking if an object is a string, keep in mind that it might be a + unicode string too! 
In Python 2.3, str and unicode have a common base + class, basestring, so you can do: + + Okay: if isinstance(obj, basestring): + Okay: if type(a1) is type(b1): + """ + match = COMPARE_TYPE_REGEX.search(logical_line) + if match and not noqa: + inst = match.group(1) + if inst and isidentifier(inst) and inst not in SINGLETONS: + return # Allow comparison for types which are not obvious + yield match.start(), "E721 do not compare types, use 'isinstance()'" + + +@register_check +def bare_except(logical_line, noqa): + r"""When catching exceptions, mention specific exceptions when possible. + + Okay: except Exception: + Okay: except BaseException: + E722: except: + """ + if noqa: + return + + regex = re.compile(r"except\s*:") + match = regex.match(logical_line) + if match: + yield match.start(), "E722 do not use bare except'" + + +@register_check +def ambiguous_identifier(logical_line, tokens): + r"""Never use the characters 'l', 'O', or 'I' as variable names. + + In some fonts, these characters are indistinguishable from the numerals + one and zero. When tempted to use 'l', use 'L' instead. + + Okay: L = 0 + Okay: o = 123 + Okay: i = 42 + E741: l = 0 + E741: O = 123 + E741: I = 42 + + Variables can be bound in several other contexts, including class and + function definitions, 'global' and 'nonlocal' statements, exception + handlers, and 'with' statements. + + Okay: except AttributeError as o: + Okay: with lock as L: + E741: except AttributeError as O: + E741: with lock as l: + E741: global I + E741: nonlocal l + E742: class I(object): + E743: def l(x): + """ + idents_to_avoid = ('l', 'O', 'I') + prev_type, prev_text, prev_start, prev_end, __ = tokens[0] + for token_type, text, start, end, line in tokens[1:]: + ident = pos = None + # identifiers on the lhs of an assignment operator + if token_type == tokenize.OP and '=' in text: + if prev_text in idents_to_avoid: + ident = prev_text + pos = prev_start + # identifiers bound to a value with 'as', 'global', or 'nonlocal' + if prev_text in ('as', 'global', 'nonlocal'): + if text in idents_to_avoid: + ident = text + pos = start + if prev_text == 'class': + if text in idents_to_avoid: + yield start, "E742 ambiguous class definition '%s'" % text + if prev_text == 'def': + if text in idents_to_avoid: + yield start, "E743 ambiguous function definition '%s'" % text + if ident: + yield pos, "E741 ambiguous variable name '%s'" % ident + prev_text = text + prev_start = start + + +@register_check +def python_3000_has_key(logical_line, noqa): + r"""The {}.has_key() method is removed in Python 3: use the 'in' operator. + + Okay: if "alph" in d:\n print d["alph"] + W601: assert d.has_key('alph') + """ + pos = logical_line.find('.has_key(') + if pos > -1 and not noqa: + yield pos, "W601 .has_key() is deprecated, use 'in'" + + +@register_check +def python_3000_raise_comma(logical_line): + r"""When raising an exception, use "raise ValueError('message')". + + The older form is removed in Python 3. + + Okay: raise DummyError("Message") + W602: raise DummyError, "Message" + """ + match = RAISE_COMMA_REGEX.match(logical_line) + if match and not RERAISE_COMMA_REGEX.match(logical_line): + yield match.end() - 1, "W602 deprecated form of raising exception" + + +@register_check +def python_3000_not_equal(logical_line): + r"""New code should always use != instead of <>. + + The older syntax is removed in Python 3. 
+ + Okay: if a != 'no': + W603: if a <> 'no': + """ + pos = logical_line.find('<>') + if pos > -1: + yield pos, "W603 '<>' is deprecated, use '!='" + + +@register_check +def python_3000_backticks(logical_line): + r"""Use repr() instead of backticks in Python 3. + + Okay: val = repr(1 + 2) + W604: val = `1 + 2` + """ + pos = logical_line.find('`') + if pos > -1: + yield pos, "W604 backticks are deprecated, use 'repr()'" + + +############################################################################## +# Helper functions +############################################################################## + + +if sys.version_info < (3,): + # Python 2: implicit encoding. + def readlines(filename): + """Read the source code.""" + with open(filename, 'rU') as f: + return f.readlines() + isidentifier = re.compile(r'[a-zA-Z_]\w*$').match + stdin_get_value = sys.stdin.read +else: + # Python 3 + def readlines(filename): + """Read the source code.""" + try: + with open(filename, 'rb') as f: + (coding, lines) = tokenize.detect_encoding(f.readline) + f = TextIOWrapper(f, coding, line_buffering=True) + return [line.decode(coding) for line in lines] + f.readlines() + except (LookupError, SyntaxError, UnicodeError): + # Fall back if file encoding is improperly declared + with open(filename, encoding='latin-1') as f: + return f.readlines() + isidentifier = str.isidentifier + + def stdin_get_value(): + """Read the value from stdin.""" + return TextIOWrapper(sys.stdin.buffer, errors='ignore').read() + +noqa = lru_cache(512)(re.compile(r'# no(?:qa|pep8)\b', re.I).search) + + +def expand_indent(line): + r"""Return the amount of indentation. + + Tabs are expanded to the next multiple of 8. + + >>> expand_indent(' ') + 4 + >>> expand_indent('\t') + 8 + >>> expand_indent(' \t') + 8 + >>> expand_indent(' \t') + 16 + """ + if '\t' not in line: + return len(line) - len(line.lstrip()) + result = 0 + for char in line: + if char == '\t': + result = result // 8 * 8 + 8 + elif char == ' ': + result += 1 + else: + break + return result + + +def mute_string(text): + """Replace contents with 'xxx' to prevent syntax matching. + + >>> mute_string('"abc"') + '"xxx"' + >>> mute_string("'''abc'''") + "'''xxx'''" + >>> mute_string("r'abc'") + "r'xxx'" + """ + # String modifiers (e.g. u or r) + start = text.index(text[-1]) + 1 + end = len(text) - 1 + # Triple quotes + if text[-3:] in ('"""', "'''"): + start += 2 + end -= 2 + return text[:start] + 'x' * (end - start) + text[end:] + + +def parse_udiff(diff, patterns=None, parent='.'): + """Return a dictionary of matching lines.""" + # For each file of the diff, the entry key is the filename, + # and the value is a set of row numbers to consider. + rv = {} + path = nrows = None + for line in diff.splitlines(): + if nrows: + if line[:1] != '-': + nrows -= 1 + continue + if line[:3] == '@@ ': + hunk_match = HUNK_REGEX.match(line) + (row, nrows) = [int(g or '1') for g in hunk_match.groups()] + rv[path].update(range(row, row + nrows)) + elif line[:3] == '+++': + path = line[4:].split('\t', 1)[0] + if path[:2] == 'b/': + path = path[2:] + rv[path] = set() + return dict([(os.path.join(parent, path), rows) + for (path, rows) in rv.items() + if rows and filename_match(path, patterns)]) + + +def normalize_paths(value, parent=os.curdir): + """Parse a comma-separated list of paths. + + Return a list of absolute paths. 
+ """ + if not value: + return [] + if isinstance(value, list): + return value + paths = [] + for path in value.split(','): + path = path.strip() + if '/' in path: + path = os.path.abspath(os.path.join(parent, path)) + paths.append(path.rstrip('/')) + return paths + + +def filename_match(filename, patterns, default=True): + """Check if patterns contains a pattern that matches filename. + + If patterns is unspecified, this always returns True. + """ + if not patterns: + return default + return any(fnmatch(filename, pattern) for pattern in patterns) + + +def update_counts(s, counts): + r"""Adds one to the counts of each appearance of characters in s, + for characters in counts""" + for char in s: + if char in counts: + counts[char] += 1 + + +def _is_eol_token(token): + return token[0] in NEWLINE or token[4][token[3][1]:].lstrip() == '\\\n' + + +if COMMENT_WITH_NL: + def _is_eol_token(token, _eol_token=_is_eol_token): + return _eol_token(token) or (token[0] == tokenize.COMMENT and + token[1] == token[4]) + +############################################################################## +# Framework to run all checks +############################################################################## + + +class Checker(object): + """Load a Python source file, tokenize it, check coding style.""" + + def __init__(self, filename=None, lines=None, + options=None, report=None, **kwargs): + if options is None: + options = StyleGuide(kwargs).options + else: + assert not kwargs + self._io_error = None + self._physical_checks = options.physical_checks + self._logical_checks = options.logical_checks + self._ast_checks = options.ast_checks + self.max_line_length = options.max_line_length + self.multiline = False # in a multiline string? + self.hang_closing = options.hang_closing + self.verbose = options.verbose + self.filename = filename + # Dictionary where a checker can store its custom state. 
+ self._checker_states = {} + if filename is None: + self.filename = 'stdin' + self.lines = lines or [] + elif filename == '-': + self.filename = 'stdin' + self.lines = stdin_get_value().splitlines(True) + elif lines is None: + try: + self.lines = readlines(filename) + except IOError: + (exc_type, exc) = sys.exc_info()[:2] + self._io_error = '%s: %s' % (exc_type.__name__, exc) + self.lines = [] + else: + self.lines = lines + if self.lines: + ord0 = ord(self.lines[0][0]) + if ord0 in (0xef, 0xfeff): # Strip the UTF-8 BOM + if ord0 == 0xfeff: + self.lines[0] = self.lines[0][1:] + elif self.lines[0][:3] == '\xef\xbb\xbf': + self.lines[0] = self.lines[0][3:] + self.report = report or options.report + self.report_error = self.report.error + self.noqa = False + + def report_invalid_syntax(self): + """Check if the syntax is valid.""" + (exc_type, exc) = sys.exc_info()[:2] + if len(exc.args) > 1: + offset = exc.args[1] + if len(offset) > 2: + offset = offset[1:3] + else: + offset = (1, 0) + self.report_error(offset[0], offset[1] or 0, + 'E901 %s: %s' % (exc_type.__name__, exc.args[0]), + self.report_invalid_syntax) + + def readline(self): + """Get the next line from the input buffer.""" + if self.line_number >= self.total_lines: + return '' + line = self.lines[self.line_number] + self.line_number += 1 + if self.indent_char is None and line[:1] in WHITESPACE: + self.indent_char = line[0] + return line + + def run_check(self, check, argument_names): + """Run a check plugin.""" + arguments = [] + for name in argument_names: + arguments.append(getattr(self, name)) + return check(*arguments) + + def init_checker_state(self, name, argument_names): + """Prepare custom state for the specific checker plugin.""" + if 'checker_state' in argument_names: + self.checker_state = self._checker_states.setdefault(name, {}) + + def check_physical(self, line): + """Run all physical checks on a raw input line.""" + self.physical_line = line + for name, check, argument_names in self._physical_checks: + self.init_checker_state(name, argument_names) + result = self.run_check(check, argument_names) + if result is not None: + (offset, text) = result + self.report_error(self.line_number, offset, text, check) + if text[:4] == 'E101': + self.indent_char = line[0] + + def build_tokens_line(self): + """Build a logical line from tokens.""" + logical = [] + comments = [] + length = 0 + prev_row = prev_col = mapping = None + for token_type, text, start, end, line in self.tokens: + if token_type in SKIP_TOKENS: + continue + if not mapping: + mapping = [(0, start)] + if token_type == tokenize.COMMENT: + comments.append(text) + continue + if token_type == tokenize.STRING: + text = mute_string(text) + if prev_row: + (start_row, start_col) = start + if prev_row != start_row: # different row + prev_text = self.lines[prev_row - 1][prev_col - 1] + if prev_text == ',' or (prev_text not in '{[(' and + text not in '}])'): + text = ' ' + text + elif prev_col != start_col: # different column + text = line[prev_col:start_col] + text + logical.append(text) + length += len(text) + mapping.append((length, end)) + (prev_row, prev_col) = end + self.logical_line = ''.join(logical) + self.noqa = comments and noqa(''.join(comments)) + return mapping + + def check_logical(self): + """Build a line from tokens and run all logical checks on it.""" + self.report.increment_logical_line() + mapping = self.build_tokens_line() + if not mapping: + return + + mapping_offsets = [offset for offset, _ in mapping] + (start_row, start_col) = mapping[0][1] + start_line 
= self.lines[start_row - 1] + self.indent_level = expand_indent(start_line[:start_col]) + if self.blank_before < self.blank_lines: + self.blank_before = self.blank_lines + if self.verbose >= 2: + print(self.logical_line[:80].rstrip()) + for name, check, argument_names in self._logical_checks: + if self.verbose >= 4: + print(' ' + name) + self.init_checker_state(name, argument_names) + for offset, text in self.run_check(check, argument_names) or (): + if not isinstance(offset, tuple): + # As mappings are ordered, bisecting is a fast way + # to find a given offset in them. + token_offset, pos = mapping[bisect.bisect_left( + mapping_offsets, offset)] + offset = (pos[0], pos[1] + offset - token_offset) + self.report_error(offset[0], offset[1], text, check) + if self.logical_line: + self.previous_indent_level = self.indent_level + self.previous_logical = self.logical_line + if not self.indent_level: + self.previous_unindented_logical_line = self.logical_line + self.blank_lines = 0 + self.tokens = [] + + def check_ast(self): + """Build the file's AST and run all AST checks.""" + try: + tree = compile(''.join(self.lines), '', 'exec', PyCF_ONLY_AST) + except (ValueError, SyntaxError, TypeError): + return self.report_invalid_syntax() + for name, cls, __ in self._ast_checks: + checker = cls(tree, self.filename) + for lineno, offset, text, check in checker.run(): + if not self.lines or not noqa(self.lines[lineno - 1]): + self.report_error(lineno, offset, text, check) + + def generate_tokens(self): + """Tokenize the file, run physical line checks and yield tokens.""" + if self._io_error: + self.report_error(1, 0, 'E902 %s' % self._io_error, readlines) + tokengen = tokenize.generate_tokens(self.readline) + try: + for token in tokengen: + if token[2][0] > self.total_lines: + return + self.noqa = token[4] and noqa(token[4]) + self.maybe_check_physical(token) + yield token + except (SyntaxError, tokenize.TokenError): + self.report_invalid_syntax() + + def maybe_check_physical(self, token): + """If appropriate (based on token), check current physical line(s).""" + # Called after every token, but act only on end of line. + if _is_eol_token(token): + # Obviously, a newline token ends a single physical line. + self.check_physical(token[4]) + elif token[0] == tokenize.STRING and '\n' in token[1]: + # Less obviously, a string that contains newlines is a + # multiline string, either triple-quoted or with internal + # newlines backslash-escaped. Check every physical line in the + # string *except* for the last one: its newline is outside of + # the multiline string, so we consider it a regular physical + # line, and will check it like any other physical line. 
+ # + # Subtleties: + # - we don't *completely* ignore the last line; if it contains + # the magical "# noqa" comment, we disable all physical + # checks for the entire multiline string + # - have to wind self.line_number back because initially it + # points to the last line of the string, and we want + # check_physical() to give accurate feedback + if noqa(token[4]): + return + self.multiline = True + self.line_number = token[2][0] + _, src, (_, offset), _, _ = token + src = self.lines[self.line_number - 1][:offset] + src + for line in src.split('\n')[:-1]: + self.check_physical(line + '\n') + self.line_number += 1 + self.multiline = False + + def check_all(self, expected=None, line_offset=0): + """Run all checks on the input file.""" + self.report.init_file(self.filename, self.lines, expected, line_offset) + self.total_lines = len(self.lines) + if self._ast_checks: + self.check_ast() + self.line_number = 0 + self.indent_char = None + self.indent_level = self.previous_indent_level = 0 + self.previous_logical = '' + self.previous_unindented_logical_line = '' + self.tokens = [] + self.blank_lines = self.blank_before = 0 + parens = 0 + for token in self.generate_tokens(): + self.tokens.append(token) + token_type, text = token[0:2] + if self.verbose >= 3: + if token[2][0] == token[3][0]: + pos = '[%s:%s]' % (token[2][1] or '', token[3][1]) + else: + pos = 'l.%s' % token[3][0] + print('l.%s\t%s\t%s\t%r' % + (token[2][0], pos, tokenize.tok_name[token[0]], text)) + if token_type == tokenize.OP: + if text in '([{': + parens += 1 + elif text in '}])': + parens -= 1 + elif not parens: + if token_type in NEWLINE: + if token_type == tokenize.NEWLINE: + self.check_logical() + self.blank_before = 0 + elif len(self.tokens) == 1: + # The physical line contains only this token. 
+ self.blank_lines += 1 + del self.tokens[0] + else: + self.check_logical() + elif COMMENT_WITH_NL and token_type == tokenize.COMMENT: + if len(self.tokens) == 1: + # The comment also ends a physical line + token = list(token) + token[1] = text.rstrip('\r\n') + token[3] = (token[2][0], token[2][1] + len(token[1])) + self.tokens = [tuple(token)] + self.check_logical() + if self.tokens: + self.check_physical(self.lines[-1]) + self.check_logical() + return self.report.get_file_results() + + +class BaseReport(object): + """Collect the results of the checks.""" + + print_filename = False + + def __init__(self, options): + self._benchmark_keys = options.benchmark_keys + self._ignore_code = options.ignore_code + # Results + self.elapsed = 0 + self.total_errors = 0 + self.counters = dict.fromkeys(self._benchmark_keys, 0) + self.messages = {} + + def start(self): + """Start the timer.""" + self._start_time = time.time() + + def stop(self): + """Stop the timer.""" + self.elapsed = time.time() - self._start_time + + def init_file(self, filename, lines, expected, line_offset): + """Signal a new file.""" + self.filename = filename + self.lines = lines + self.expected = expected or () + self.line_offset = line_offset + self.file_errors = 0 + self.counters['files'] += 1 + self.counters['physical lines'] += len(lines) + + def increment_logical_line(self): + """Signal a new logical line.""" + self.counters['logical lines'] += 1 + + def error(self, line_number, offset, text, check): + """Report an error, according to options.""" + code = text[:4] + if self._ignore_code(code): + return + if code in self.counters: + self.counters[code] += 1 + else: + self.counters[code] = 1 + self.messages[code] = text[5:] + # Don't care about expected errors or warnings + if code in self.expected: + return + if self.print_filename and not self.file_errors: + print(self.filename) + self.file_errors += 1 + self.total_errors += 1 + return code + + def get_file_results(self): + """Return the count of errors and warnings for this file.""" + return self.file_errors + + def get_count(self, prefix=''): + """Return the total count of errors and warnings.""" + return sum([self.counters[key] + for key in self.messages if key.startswith(prefix)]) + + def get_statistics(self, prefix=''): + """Get statistics for message codes that start with the prefix. 
+ + prefix='' matches all errors and warnings + prefix='E' matches all errors + prefix='W' matches all warnings + prefix='E4' matches all errors that have to do with imports + """ + return ['%-7s %s %s' % (self.counters[key], key, self.messages[key]) + for key in sorted(self.messages) if key.startswith(prefix)] + + def print_statistics(self, prefix=''): + """Print overall statistics (number of errors and warnings).""" + for line in self.get_statistics(prefix): + print(line) + + def print_benchmark(self): + """Print benchmark numbers.""" + print('%-7.2f %s' % (self.elapsed, 'seconds elapsed')) + if self.elapsed: + for key in self._benchmark_keys: + print('%-7d %s per second (%d total)' % + (self.counters[key] / self.elapsed, key, + self.counters[key])) + + +class FileReport(BaseReport): + """Collect the results of the checks and print only the filenames.""" + + print_filename = True + + +class StandardReport(BaseReport): + """Collect and print the results of the checks.""" + + def __init__(self, options): + super(StandardReport, self).__init__(options) + self._fmt = REPORT_FORMAT.get(options.format.lower(), + options.format) + self._repeat = options.repeat + self._show_source = options.show_source + self._show_pep8 = options.show_pep8 + + def init_file(self, filename, lines, expected, line_offset): + """Signal a new file.""" + self._deferred_print = [] + return super(StandardReport, self).init_file( + filename, lines, expected, line_offset) + + def error(self, line_number, offset, text, check): + """Report an error, according to options.""" + code = super(StandardReport, self).error(line_number, offset, + text, check) + if code and (self.counters[code] == 1 or self._repeat): + self._deferred_print.append( + (line_number, offset, code, text[5:], check.__doc__)) + return code + + def get_file_results(self): + """Print the result and return the overall count for this file.""" + self._deferred_print.sort() + for line_number, offset, code, text, doc in self._deferred_print: + print(self._fmt % { + 'path': self.filename, + 'row': self.line_offset + line_number, 'col': offset + 1, + 'code': code, 'text': text, + }) + if self._show_source: + if line_number > len(self.lines): + line = '' + else: + line = self.lines[line_number - 1] + print(line.rstrip()) + print(re.sub(r'\S', ' ', line[:offset]) + '^') + if self._show_pep8 and doc: + print(' ' + doc.strip()) + + # stdout is block buffered when not stdout.isatty(). + # line can be broken where buffer boundary since other processes + # write to same file. + # flush() after print() to avoid buffer boundary. + # Typical buffer size is 8192. line written safely when + # len(line) < 8192. 
+ sys.stdout.flush() + return self.file_errors + + +class DiffReport(StandardReport): + """Collect and print the results for the changed lines only.""" + + def __init__(self, options): + super(DiffReport, self).__init__(options) + self._selected = options.selected_lines + + def error(self, line_number, offset, text, check): + if line_number not in self._selected[self.filename]: + return + return super(DiffReport, self).error(line_number, offset, text, check) + + +class StyleGuide(object): + """Initialize a PEP-8 instance with few options.""" + + def __init__(self, *args, **kwargs): + # build options from the command line + self.checker_class = kwargs.pop('checker_class', Checker) + parse_argv = kwargs.pop('parse_argv', False) + config_file = kwargs.pop('config_file', False) + parser = kwargs.pop('parser', None) + # build options from dict + options_dict = dict(*args, **kwargs) + arglist = None if parse_argv else options_dict.get('paths', None) + options, self.paths = process_options( + arglist, parse_argv, config_file, parser) + if options_dict: + options.__dict__.update(options_dict) + if 'paths' in options_dict: + self.paths = options_dict['paths'] + + self.runner = self.input_file + self.options = options + + if not options.reporter: + options.reporter = BaseReport if options.quiet else StandardReport + + options.select = tuple(options.select or ()) + if not (options.select or options.ignore or + options.testsuite or options.doctest) and DEFAULT_IGNORE: + # The default choice: ignore controversial checks + options.ignore = tuple(DEFAULT_IGNORE.split(',')) + else: + # Ignore all checks which are not explicitly selected + options.ignore = ('',) if options.select else tuple(options.ignore) + options.benchmark_keys = BENCHMARK_KEYS[:] + options.ignore_code = self.ignore_code + options.physical_checks = self.get_checks('physical_line') + options.logical_checks = self.get_checks('logical_line') + options.ast_checks = self.get_checks('tree') + self.init_report() + + def init_report(self, reporter=None): + """Initialize the report instance.""" + self.options.report = (reporter or self.options.reporter)(self.options) + return self.options.report + + def check_files(self, paths=None): + """Run all checks on the paths.""" + if paths is None: + paths = self.paths + report = self.options.report + runner = self.runner + report.start() + try: + for path in paths: + if os.path.isdir(path): + self.input_dir(path) + elif not self.excluded(path): + runner(path) + except KeyboardInterrupt: + print('... stopped') + report.stop() + return report + + def input_file(self, filename, lines=None, expected=None, line_offset=0): + """Run all checks on a Python source file.""" + if self.options.verbose: + print('checking %s' % filename) + fchecker = self.checker_class( + filename, lines=lines, options=self.options) + return fchecker.check_all(expected=expected, line_offset=line_offset) + + def input_dir(self, dirname): + """Check all files in this directory and all subdirectories.""" + dirname = dirname.rstrip('/') + if self.excluded(dirname): + return 0 + counters = self.options.report.counters + verbose = self.options.verbose + filepatterns = self.options.filename + runner = self.runner + for root, dirs, files in os.walk(dirname): + if verbose: + print('directory ' + root) + counters['directories'] += 1 + for subdir in sorted(dirs): + if self.excluded(subdir, root): + dirs.remove(subdir) + for filename in sorted(files): + # contain a pattern that matches? 
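+ # i.e. run the checker only on filenames that match the --filename patterns and are not excluded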
+ if ((filename_match(filename, filepatterns) and + not self.excluded(filename, root))): + runner(os.path.join(root, filename)) + + def excluded(self, filename, parent=None): + """Check if the file should be excluded. + + Check if 'options.exclude' contains a pattern that matches filename. + """ + if not self.options.exclude: + return False + basename = os.path.basename(filename) + if filename_match(basename, self.options.exclude): + return True + if parent: + filename = os.path.join(parent, filename) + filename = os.path.abspath(filename) + return filename_match(filename, self.options.exclude) + + def ignore_code(self, code): + """Check if the error code should be ignored. + + If 'options.select' contains a prefix of the error code, + return False. Else, if 'options.ignore' contains a prefix of + the error code, return True. + """ + if len(code) < 4 and any(s.startswith(code) + for s in self.options.select): + return False + return (code.startswith(self.options.ignore) and + not code.startswith(self.options.select)) + + def get_checks(self, argument_name): + """Get all the checks for this category. + + Find all globally visible functions where the first argument name + starts with argument_name and which contain selected tests. + """ + checks = [] + for check, attrs in _checks[argument_name].items(): + (codes, args) = attrs + if any(not (code and self.ignore_code(code)) for code in codes): + checks.append((check.__name__, check, args)) + return sorted(checks) + + +def get_parser(prog='pycodestyle', version=__version__): + """Create the parser for the program.""" + parser = OptionParser(prog=prog, version=version, + usage="%prog [options] input ...") + parser.config_options = [ + 'exclude', 'filename', 'select', 'ignore', 'max-line-length', + 'hang-closing', 'count', 'format', 'quiet', 'show-pep8', + 'show-source', 'statistics', 'verbose'] + parser.add_option('-v', '--verbose', default=0, action='count', + help="print status messages, or debug with -vv") + parser.add_option('-q', '--quiet', default=0, action='count', + help="report only file names, or nothing with -qq") + parser.add_option('-r', '--repeat', default=True, action='store_true', + help="(obsolete) show all occurrences of the same error") + parser.add_option('--first', action='store_false', dest='repeat', + help="show first occurrence of each error") + parser.add_option('--exclude', metavar='patterns', default=DEFAULT_EXCLUDE, + help="exclude files or directories which match these " + "comma separated patterns (default: %default)") + parser.add_option('--filename', metavar='patterns', default='*.py', + help="when parsing directories, only check filenames " + "matching these comma separated patterns " + "(default: %default)") + parser.add_option('--select', metavar='errors', default='', + help="select errors and warnings (e.g. E,W6)") + parser.add_option('--ignore', metavar='errors', default='', + help="skip errors and warnings (e.g. 
E4,W) " + "(default: %s)" % DEFAULT_IGNORE) + parser.add_option('--show-source', action='store_true', + help="show source code for each error") + parser.add_option('--show-pep8', action='store_true', + help="show text of PEP 8 for each error " + "(implies --first)") + parser.add_option('--statistics', action='store_true', + help="count errors and warnings") + parser.add_option('--count', action='store_true', + help="print total number of errors and warnings " + "to standard error and set exit code to 1 if " + "total is not null") + parser.add_option('--max-line-length', type='int', metavar='n', + default=MAX_LINE_LENGTH, + help="set maximum allowed line length " + "(default: %default)") + parser.add_option('--hang-closing', action='store_true', + help="hang closing bracket instead of matching " + "indentation of opening bracket's line") + parser.add_option('--format', metavar='format', default='default', + help="set the error format [default|pylint|]") + parser.add_option('--diff', action='store_true', + help="report changes only within line number ranges in " + "the unified diff received on STDIN") + group = parser.add_option_group("Testing Options") + if os.path.exists(TESTSUITE_PATH): + group.add_option('--testsuite', metavar='dir', + help="run regression tests from dir") + group.add_option('--doctest', action='store_true', + help="run doctest on myself") + group.add_option('--benchmark', action='store_true', + help="measure processing speed") + return parser + + +def read_config(options, args, arglist, parser): + """Read and parse configurations. + + If a config file is specified on the command line with the "--config" + option, then only it is used for configuration. + + Otherwise, the user configuration (~/.config/pycodestyle) and any local + configurations in the current directory or above will be merged together + (in that order) using the read method of ConfigParser. + """ + config = RawConfigParser() + + cli_conf = options.config + + local_dir = os.curdir + + if USER_CONFIG and os.path.isfile(USER_CONFIG): + if options.verbose: + print('user configuration: %s' % USER_CONFIG) + config.read(USER_CONFIG) + + parent = tail = args and os.path.abspath(os.path.commonprefix(args)) + while tail: + if config.read(os.path.join(parent, fn) for fn in PROJECT_CONFIG): + local_dir = parent + if options.verbose: + print('local configuration: in %s' % parent) + break + (parent, tail) = os.path.split(parent) + + if cli_conf and os.path.isfile(cli_conf): + if options.verbose: + print('cli configuration: %s' % cli_conf) + config.read(cli_conf) + + pycodestyle_section = None + if config.has_section(parser.prog): + pycodestyle_section = parser.prog + elif config.has_section('pep8'): + pycodestyle_section = 'pep8' # Deprecated + warnings.warn('[pep8] section is deprecated. 
Use [pycodestyle].') + + if pycodestyle_section: + option_list = dict([(o.dest, o.type or o.action) + for o in parser.option_list]) + + # First, read the default values + (new_options, __) = parser.parse_args([]) + + # Second, parse the configuration + for opt in config.options(pycodestyle_section): + if opt.replace('_', '-') not in parser.config_options: + print(" unknown option '%s' ignored" % opt) + continue + if options.verbose > 1: + print(" %s = %s" % (opt, + config.get(pycodestyle_section, opt))) + normalized_opt = opt.replace('-', '_') + opt_type = option_list[normalized_opt] + if opt_type in ('int', 'count'): + value = config.getint(pycodestyle_section, opt) + elif opt_type in ('store_true', 'store_false'): + value = config.getboolean(pycodestyle_section, opt) + else: + value = config.get(pycodestyle_section, opt) + if normalized_opt == 'exclude': + value = normalize_paths(value, local_dir) + setattr(new_options, normalized_opt, value) + + # Third, overwrite with the command-line options + (options, __) = parser.parse_args(arglist, values=new_options) + options.doctest = options.testsuite = False + return options + + +def process_options(arglist=None, parse_argv=False, config_file=None, + parser=None): + """Process options passed either via arglist or via command line args. + + Passing in the ``config_file`` parameter allows other tools, such as flake8 + to specify their own options to be processed in pycodestyle. + """ + if not parser: + parser = get_parser() + if not parser.has_option('--config'): + group = parser.add_option_group("Configuration", description=( + "The project options are read from the [%s] section of the " + "tox.ini file or the setup.cfg file located in any parent folder " + "of the path(s) being processed. Allowed options are: %s." % + (parser.prog, ', '.join(parser.config_options)))) + group.add_option('--config', metavar='path', default=config_file, + help="user config file location") + # Don't read the command line if the module is used as a library. + if not arglist and not parse_argv: + arglist = [] + # If parse_argv is True and arglist is None, arguments are + # parsed from the command line (sys.argv) + (options, args) = parser.parse_args(arglist) + options.reporter = None + + if options.ensure_value('testsuite', False): + args.append(options.testsuite) + elif not options.ensure_value('doctest', False): + if parse_argv and not args: + if options.diff or any(os.path.exists(name) + for name in PROJECT_CONFIG): + args = ['.'] + else: + parser.error('input not specified') + options = read_config(options, args, arglist, parser) + options.reporter = parse_argv and options.quiet == 1 and FileReport + + options.filename = _parse_multi_options(options.filename) + options.exclude = normalize_paths(options.exclude) + options.select = _parse_multi_options(options.select) + options.ignore = _parse_multi_options(options.ignore) + + if options.diff: + options.reporter = DiffReport + stdin = stdin_get_value() + options.selected_lines = parse_udiff(stdin, options.filename, args[0]) + args = sorted(options.selected_lines) + + return options, args + + +def _parse_multi_options(options, split_token=','): + r"""Split and strip and discard empties. 
+ + Turns the following: + + A, + B, + + into ["A", "B"] + """ + if options: + return [o.strip() for o in options.split(split_token) if o.strip()] + else: + return options + + +def _main(): + """Parse options and run checks on Python source.""" + import signal + + # Handle "Broken pipe" gracefully + try: + signal.signal(signal.SIGPIPE, lambda signum, frame: sys.exit(1)) + except AttributeError: + pass # not supported on Windows + + style_guide = StyleGuide(parse_argv=True) + options = style_guide.options + + if options.doctest or options.testsuite: + from testsuite.support import run_tests + report = run_tests(style_guide) + else: + report = style_guide.check_files() + + if options.statistics: + report.print_statistics() + + if options.benchmark: + report.print_benchmark() + + if options.testsuite and not options.quiet: + report.print_results() + + if report.total_errors: + if options.count: + sys.stderr.write(str(report.total_errors) + '\n') + sys.exit(1) + + +if __name__ == '__main__': + _main() diff --git a/nets/LS3DCG.py b/nets/LS3DCG.py new file mode 100644 index 0000000000000000000000000000000000000000..76510dd250d83db4f5af11b5a3fad5db77f2bb1d --- /dev/null +++ b/nets/LS3DCG.py @@ -0,0 +1,337 @@ +''' +not exactly the same as the official repo but the results are good +''' +import sys +import os + +from data_utils.lower_body import c_index_3d + +sys.path.append(os.getcwd()) + +import numpy as np +import torch +import torch.nn as nn +import torch.optim as optim +import torch.nn.functional as F +import math + +from nets.base import TrainWrapperBaseClass +from nets.layers import SeqEncoder1D +from losses import KeypointLoss, L1Loss, KLLoss +from data_utils.utils import get_melspec, get_mfcc_psf, get_mfcc_ta +from nets.utils import denormalize + + +class Decoder(nn.Module): + def __init__(self, in_ch, out_ch): + super(Decoder, self).__init__() + self.up1 = nn.Sequential( + ConvNormRelu(in_ch // 2 + in_ch, in_ch // 2), + ConvNormRelu(in_ch // 2, in_ch // 2), + nn.Upsample(scale_factor=2, mode='nearest') + ) + self.up2 = nn.Sequential( + ConvNormRelu(in_ch // 4 + in_ch // 2, in_ch // 4), + ConvNormRelu(in_ch // 4, in_ch // 4), + nn.Upsample(scale_factor=2, mode='nearest') + ) + self.up3 = nn.Sequential( + ConvNormRelu(in_ch // 8 + in_ch // 4, in_ch // 8), + ConvNormRelu(in_ch // 8, in_ch // 8), + nn.Conv1d(in_ch // 8, out_ch, 1, 1) + ) + + def forward(self, x, x1, x2, x3): + x = F.interpolate(x, x3.shape[2]) + x = torch.cat([x, x3], dim=1) + x = self.up1(x) + x = F.interpolate(x, x2.shape[2]) + x = torch.cat([x, x2], dim=1) + x = self.up2(x) + x = F.interpolate(x, x1.shape[2]) + x = torch.cat([x, x1], dim=1) + x = self.up3(x) + return x + + +class EncoderDecoder(nn.Module): + def __init__(self, n_frames, each_dim): + super().__init__() + self.n_frames = n_frames + + self.down1 = nn.Sequential( + ConvNormRelu(64, 64, '1d', False), + ConvNormRelu(64, 128, '1d', False), + ) + self.down2 = nn.Sequential( + ConvNormRelu(128, 128, '1d', False), + ConvNormRelu(128, 256, '1d', False), + ) + self.down3 = nn.Sequential( + ConvNormRelu(256, 256, '1d', False), + ConvNormRelu(256, 512, '1d', False), + ) + self.down4 = nn.Sequential( + ConvNormRelu(512, 512, '1d', False), + ConvNormRelu(512, 1024, '1d', False), + ) + + self.down = nn.MaxPool1d(kernel_size=2) + self.up = nn.Upsample(scale_factor=2, mode='nearest') + + self.face_decoder = Decoder(1024, each_dim[0] + each_dim[3]) + self.body_decoder = Decoder(1024, each_dim[1]) + self.hand_decoder = Decoder(1024, each_dim[2]) + + def forward(self,
spectrogram, time_steps=None): + if time_steps is None: + time_steps = self.n_frames + + x1 = self.down1(spectrogram) + x = self.down(x1) + x2 = self.down2(x) + x = self.down(x2) + x3 = self.down3(x) + x = self.down(x3) + x = self.down4(x) + x = self.up(x) + + face = self.face_decoder(x, x1, x2, x3) + body = self.body_decoder(x, x1, x2, x3) + hand = self.hand_decoder(x, x1, x2, x3) + + return face, body, hand + + +class Generator(nn.Module): + def __init__(self, + each_dim, + training=False, + device=None + ): + super().__init__() + + self.training = training + self.device = device + + self.encoderdecoder = EncoderDecoder(15, each_dim) + + def forward(self, in_spec, time_steps=None): + if time_steps is not None: + self.gen_length = time_steps + + face, body, hand = self.encoderdecoder(in_spec) + out = torch.cat([face, body, hand], dim=1) + out = out.transpose(1, 2) + + return out + + +class Discriminator(nn.Module): + def __init__(self, input_dim): + super().__init__() + self.net = nn.Sequential( + ConvNormRelu(input_dim, 128, '1d'), + ConvNormRelu(128, 256, '1d'), + nn.MaxPool1d(kernel_size=2), + ConvNormRelu(256, 256, '1d'), + ConvNormRelu(256, 512, '1d'), + nn.MaxPool1d(kernel_size=2), + ConvNormRelu(512, 512, '1d'), + ConvNormRelu(512, 1024, '1d'), + nn.MaxPool1d(kernel_size=2), + nn.Conv1d(1024, 1, 1, 1), + nn.Sigmoid() + ) + + def forward(self, x): + x = x.transpose(1, 2) + + out = self.net(x) + return out + + +class TrainWrapper(TrainWrapperBaseClass): + def __init__(self, args, config) -> None: + self.args = args + self.config = config + self.device = torch.device(self.args.gpu) + self.global_step = 0 + self.init_params() + + self.generator = Generator( + each_dim=self.each_dim, + training=not self.args.infer, + device=self.device, + ).to(self.device) + self.discriminator = Discriminator( + input_dim=self.each_dim[1] + self.each_dim[2] + 64 + ).to(self.device) + self.c_index = c_index_3d + self.MSELoss = KeypointLoss().to(self.device) + self.L1Loss = L1Loss().to(self.device) + self.KLLoss = KLLoss(kl_tolerance=self.config.Train.weights.kl_tolerance).to(self.device) + super().__init__(args, config) + + def init_params(self): + scale = 1 + + global_orient = round(0 * scale) + leye_pose = reye_pose = round(0 * scale) + jaw_pose = round(3 * scale) + body_pose = round((63 - 24) * scale) + left_hand_pose = right_hand_pose = round(45 * scale) + + expression = 100 + + b_j = 0 + jaw_dim = jaw_pose + b_e = b_j + jaw_dim + eye_dim = leye_pose + reye_pose + b_b = b_e + eye_dim + body_dim = global_orient + body_pose + b_h = b_b + body_dim + hand_dim = left_hand_pose + right_hand_pose + b_f = b_h + hand_dim + face_dim = expression + + self.dim_list = [b_j, b_e, b_b, b_h, b_f] + self.full_dim = jaw_dim + eye_dim + body_dim + hand_dim + self.pose = int(self.full_dim / round(3 * scale)) + self.each_dim = [jaw_dim, eye_dim + body_dim, hand_dim, face_dim] + + def __call__(self, bat): + assert (not self.args.infer), "infer mode" + self.global_step += 1 + + loss_dict = {} + + aud, poses = bat['aud_feat'].to(self.device).to(torch.float32), bat['poses'].to(self.device).to(torch.float32) + expression = bat['expression'].to(self.device).to(torch.float32) + jaw = poses[:, :3, :] + poses = poses[:, self.c_index, :] + + pred = self.generator(in_spec=aud) + + D_loss, D_loss_dict = self.get_loss( + pred_poses=pred.detach(), + gt_poses=poses, + aud=aud, + mode='training_D', + ) + + self.discriminator_optimizer.zero_grad() + D_loss.backward() + self.discriminator_optimizer.step() + + G_loss, G_loss_dict = 
self.get_loss( + pred_poses=pred, + gt_poses=poses, + aud=aud, + expression=expression, + jaw=jaw, + mode='training_G', + ) + self.generator_optimizer.zero_grad() + G_loss.backward() + self.generator_optimizer.step() + + total_loss = None + loss_dict = {} + for key in list(D_loss_dict.keys()) + list(G_loss_dict.keys()): + loss_dict[key] = G_loss_dict.get(key, 0) + D_loss_dict.get(key, 0) + + return total_loss, loss_dict + + def get_loss(self, + pred_poses, + gt_poses, + aud=None, + jaw=None, + expression=None, + mode='training_G', + ): + loss_dict = {} + aud = aud.transpose(1, 2) + gt_poses = gt_poses.transpose(1, 2) + gt_aud = torch.cat([gt_poses, aud], dim=2) + pred_aud = torch.cat([pred_poses[:, :, 103:], aud], dim=2) + + if mode == 'training_D': + dis_real = self.discriminator(gt_aud) + dis_fake = self.discriminator(pred_aud) + dis_error = self.MSELoss(torch.ones_like(dis_real).to(self.device), dis_real) + self.MSELoss( + torch.zeros_like(dis_fake).to(self.device), dis_fake) + loss_dict['dis'] = dis_error + + return dis_error, loss_dict + elif mode == 'training_G': + jaw_loss = self.L1Loss(pred_poses[:, :, :3], jaw.transpose(1, 2)) + face_loss = self.MSELoss(pred_poses[:, :, 3:103], expression.transpose(1, 2)) + body_loss = self.L1Loss(pred_poses[:, :, 103:142], gt_poses[:, :, :39]) + hand_loss = self.L1Loss(pred_poses[:, :, 142:], gt_poses[:, :, 39:]) + l1_loss = jaw_loss + face_loss + body_loss + hand_loss + + dis_output = self.discriminator(pred_aud) + gen_error = self.MSELoss(torch.ones_like(dis_output).to(self.device), dis_output) + gen_loss = self.config.Train.weights.keypoint_loss_weight * l1_loss + self.config.Train.weights.gan_loss_weight * gen_error + + loss_dict['gen'] = gen_error + loss_dict['jaw_loss'] = jaw_loss + loss_dict['face_loss'] = face_loss + loss_dict['body_loss'] = body_loss + loss_dict['hand_loss'] = hand_loss + return gen_loss, loss_dict + else: + raise ValueError(mode) + + def infer_on_audio(self, aud_fn, fps=30, initial_pose=None, norm_stats=None, id=None, B=1, **kwargs): + output = [] + assert self.args.infer, "train mode" + self.generator.eval() + + if self.config.Data.pose.normalization: + assert norm_stats is not None + data_mean = norm_stats[0] + data_std = norm_stats[1] + + pre_length = self.config.Data.pose.pre_pose_length + generate_length = self.config.Data.pose.generate_length + # assert pre_length == initial_pose.shape[-1] + # pre_poses = initial_pose.permute(0, 2, 1).to(self.device).to(torch.float32) + # B = pre_poses.shape[0] + + aud_feat = get_mfcc_ta(aud_fn, sr=22000, fps=fps, smlpx=True, type='mfcc').transpose(1, 0) + num_poses_to_generate = aud_feat.shape[-1] + aud_feat = aud_feat[np.newaxis, ...].repeat(B, axis=0) + aud_feat = torch.tensor(aud_feat, dtype=torch.float32).to(self.device) + + with torch.no_grad(): + pred_poses = self.generator(aud_feat) + pred_poses = pred_poses.cpu().numpy() + output = pred_poses.squeeze() + + return output + + def generate(self, aud, id): + self.generator.eval() + pred_poses = self.generator(aud) + return pred_poses + + +if __name__ == '__main__': + from trainer.options import parse_args + + parser = parse_args() + args = parser.parse_args( + ['--exp_name', '0', '--data_root', '0', '--speakers', '0', '--pre_pose_length', '4', '--generate_length', '64', + '--infer']) + + generator = TrainWrapper(args) + + aud_fn = '../sample_audio/jon.wav' + initial_pose = torch.randn(64, 108, 4) + norm_stats = (np.random.randn(108), np.random.randn(108)) + output = generator.infer_on_audio(aud_fn, initial_pose, norm_stats) + 
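+ # the generated sequence has 232 channels per frame: jaw (3) + expression (100) + body (39) + hands (90), matching the slices used in get_loss above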
+ print(output.shape) diff --git a/nets/__init__.py b/nets/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..0669d82d7506b314fa3fccd7dd412445fa0b37e1 --- /dev/null +++ b/nets/__init__.py @@ -0,0 +1,8 @@ +from .smplx_face import TrainWrapper as s2g_face +from .smplx_body_vq import TrainWrapper as s2g_body_vq +from .smplx_body_pixel import TrainWrapper as s2g_body_pixel +from .body_ae import TrainWrapper as s2g_body_ae +from .LS3DCG import TrainWrapper as LS3DCG +from .base import TrainWrapperBaseClass + +from .utils import normalize, denormalize \ No newline at end of file diff --git a/nets/__pycache__/LS3DCG.cpython-37.pyc b/nets/__pycache__/LS3DCG.cpython-37.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4240ad2d6f05801a5ababdf3aca215559325bf0f Binary files /dev/null and b/nets/__pycache__/LS3DCG.cpython-37.pyc differ diff --git a/nets/__pycache__/__init__.cpython-37.pyc b/nets/__pycache__/__init__.cpython-37.pyc new file mode 100644 index 0000000000000000000000000000000000000000..dd917cb082370d87639cb89a717f891e26f62151 Binary files /dev/null and b/nets/__pycache__/__init__.cpython-37.pyc differ diff --git a/nets/__pycache__/base.cpython-37.pyc b/nets/__pycache__/base.cpython-37.pyc new file mode 100644 index 0000000000000000000000000000000000000000..debc3fc429959357bbf420c9f309b65e4524befa Binary files /dev/null and b/nets/__pycache__/base.cpython-37.pyc differ diff --git a/nets/__pycache__/body_ae.cpython-37.pyc b/nets/__pycache__/body_ae.cpython-37.pyc new file mode 100644 index 0000000000000000000000000000000000000000..713c8bc698b2352998e2f8c81f996f7adc6ad0c8 Binary files /dev/null and b/nets/__pycache__/body_ae.cpython-37.pyc differ diff --git a/nets/__pycache__/init_model.cpython-37.pyc b/nets/__pycache__/init_model.cpython-37.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6b55383f701fda43ecb46d0b83bb3f398b9036a2 Binary files /dev/null and b/nets/__pycache__/init_model.cpython-37.pyc differ diff --git a/nets/__pycache__/layers.cpython-37.pyc b/nets/__pycache__/layers.cpython-37.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d499452cb20ae6c71cc1053e2251878fe23bc91c Binary files /dev/null and b/nets/__pycache__/layers.cpython-37.pyc differ diff --git a/nets/__pycache__/smplx_body_pixel.cpython-37.pyc b/nets/__pycache__/smplx_body_pixel.cpython-37.pyc new file mode 100644 index 0000000000000000000000000000000000000000..884fb01533f94405c44204dbb5f460c09a908c29 Binary files /dev/null and b/nets/__pycache__/smplx_body_pixel.cpython-37.pyc differ diff --git a/nets/__pycache__/smplx_body_vq.cpython-37.pyc b/nets/__pycache__/smplx_body_vq.cpython-37.pyc new file mode 100644 index 0000000000000000000000000000000000000000..473d52aa013059ef0a2e34ee53ebda25f4dd4e24 Binary files /dev/null and b/nets/__pycache__/smplx_body_vq.cpython-37.pyc differ diff --git a/nets/__pycache__/smplx_face.cpython-37.pyc b/nets/__pycache__/smplx_face.cpython-37.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f76958d88b56440c34b310cfcb21c056836ea56f Binary files /dev/null and b/nets/__pycache__/smplx_face.cpython-37.pyc differ diff --git a/nets/__pycache__/utils.cpython-37.pyc b/nets/__pycache__/utils.cpython-37.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5d740b32bc547875d6df84a521cf33472a3634b9 Binary files /dev/null and b/nets/__pycache__/utils.cpython-37.pyc differ diff --git a/nets/base.py b/nets/base.py new file mode 100644 index 
0000000000000000000000000000000000000000..08c07caa27ba642dd5a48cebfcf51e4e79edd574 --- /dev/null +++ b/nets/base.py @@ -0,0 +1,89 @@ +import torch +import torch.nn as nn +import torch.optim as optim + +class TrainWrapperBaseClass(): + def __init__(self, args, config) -> None: + self.init_optimizer() + + def init_optimizer(self) -> None: + print('using Adam') + self.generator_optimizer = optim.Adam( + self.generator.parameters(), + lr = self.config.Train.learning_rate.generator_learning_rate, + betas=[0.9, 0.999] + ) + if self.discriminator is not None: + self.discriminator_optimizer = optim.Adam( + self.discriminator.parameters(), + lr = self.config.Train.learning_rate.discriminator_learning_rate, + betas=[0.9, 0.999] + ) + + def __call__(self, bat): + raise NotImplementedError + + def get_loss(self, **kwargs): + raise NotImplementedError + + def state_dict(self): + model_state = { + 'generator': self.generator.state_dict(), + 'generator_optim': self.generator_optimizer.state_dict(), + 'discriminator': self.discriminator.state_dict() if self.discriminator is not None else None, + 'discriminator_optim': self.discriminator_optimizer.state_dict() if self.discriminator is not None else None + } + return model_state + + def parameters(self): + return self.generator.parameters() + + def load_state_dict(self, state_dict): + if 'generator' in state_dict: + self.generator.load_state_dict(state_dict['generator']) + else: + self.generator.load_state_dict(state_dict) + + if 'generator_optim' in state_dict and self.generator_optimizer is not None: + self.generator_optimizer.load_state_dict(state_dict['generator_optim']) + + if self.discriminator is not None: + self.discriminator.load_state_dict(state_dict['discriminator']) + + if 'discriminator_optim' in state_dict and self.discriminator_optimizer is not None: + self.discriminator_optimizer.load_state_dict(state_dict['discriminator_optim']) + + def infer_on_audio(self, aud_fn, initial_pose=None, norm_stats=None, **kwargs): + raise NotImplementedError + + def init_params(self): + if self.config.Data.pose.convert_to_6d: + scale = 2 + else: + scale = 1 + + global_orient = round(0 * scale) + leye_pose = reye_pose = round(0 * scale) + jaw_pose = round(0 * scale) + body_pose = round((63 - 24) * scale) + left_hand_pose = right_hand_pose = round(45 * scale) + if self.expression: + expression = 100 + else: + expression = 0 + + b_j = 0 + jaw_dim = jaw_pose + b_e = b_j + jaw_dim + eye_dim = leye_pose + reye_pose + b_b = b_e + eye_dim + body_dim = global_orient + body_pose + b_h = b_b + body_dim + hand_dim = left_hand_pose + right_hand_pose + b_f = b_h + hand_dim + face_dim = expression + + self.dim_list = [b_j, b_e, b_b, b_h, b_f] + self.full_dim = jaw_dim + eye_dim + body_dim + hand_dim + self.pose = int(self.full_dim / round(3 * scale)) + self.each_dim = [jaw_dim, eye_dim + body_dim, hand_dim, face_dim] \ No newline at end of file diff --git a/nets/body_ae.py b/nets/body_ae.py new file mode 100644 index 0000000000000000000000000000000000000000..3a9f8bc0ee92f8410da71d711bb19dbcc254d1af --- /dev/null +++ b/nets/body_ae.py @@ -0,0 +1,152 @@ +import os +import sys + +sys.path.append(os.getcwd()) + +from nets.base import TrainWrapperBaseClass +from nets.spg.s2glayers import Discriminator as D_S2G +from nets.spg.vqvae_1d import AE as s2g_body +import torch +import torch.optim as optim +import torch.nn.functional as F + +from data_utils.lower_body import c_index, c_index_3d, c_index_6d + + +def separate_aa(aa): + aa = aa[:, :, :].reshape(aa.shape[0], aa.shape[1], -1, 
5) + axis = F.normalize(aa[:, :, :, :3], dim=-1) + angle = F.normalize(aa[:, :, :, 3:5], dim=-1) + return axis, angle + + +class TrainWrapper(TrainWrapperBaseClass): + ''' + a wrapper receving a batch from data_utils and calculate loss + ''' + + def __init__(self, args, config): + self.args = args + self.config = config + self.device = torch.device(self.args.gpu) + self.global_step = 0 + + self.gan = False + self.convert_to_6d = self.config.Data.pose.convert_to_6d + self.preleng = self.config.Data.pose.pre_pose_length + self.expression = self.config.Data.pose.expression + self.epoch = 0 + self.init_params() + self.num_classes = 4 + self.g = s2g_body(self.each_dim[1] + self.each_dim[2], embedding_dim=64, num_embeddings=0, + num_hiddens=1024, num_residual_layers=2, num_residual_hiddens=512).to(self.device) + if self.gan: + self.discriminator = D_S2G( + pose_dim=110 + 64, pose=self.pose + ).to(self.device) + else: + self.discriminator = None + + if self.convert_to_6d: + self.c_index = c_index_6d + else: + self.c_index = c_index_3d + + super().__init__(args, config) + + def init_optimizer(self): + + self.g_optimizer = optim.Adam( + self.g.parameters(), + lr=self.config.Train.learning_rate.generator_learning_rate, + betas=[0.9, 0.999] + ) + + def state_dict(self): + model_state = { + 'g': self.g.state_dict(), + 'g_optim': self.g_optimizer.state_dict(), + 'discriminator': self.discriminator.state_dict() if self.discriminator is not None else None, + 'discriminator_optim': self.discriminator_optimizer.state_dict() if self.discriminator is not None else None + } + return model_state + + + def __call__(self, bat): + # assert (not self.args.infer), "infer mode" + self.global_step += 1 + + total_loss = None + loss_dict = {} + + aud, poses = bat['aud_feat'].to(self.device).to(torch.float32), bat['poses'].to(self.device).to(torch.float32) + + # id = bat['speaker'].to(self.device) - 20 + # id = F.one_hot(id, self.num_classes) + + poses = poses[:, self.c_index, :] + gt_poses = poses[:, :, self.preleng:].permute(0, 2, 1) + + loss = 0 + loss_dict, loss = self.vq_train(gt_poses[:, :], 'g', self.g, loss_dict, loss) + + return total_loss, loss_dict + + def vq_train(self, gt, name, model, dict, total_loss, pre=None): + x_recon = model(gt_poses=gt, pre_state=pre) + loss, loss_dict = self.get_loss(pred_poses=x_recon, gt_poses=gt, pre=pre) + # total_loss = total_loss + loss + + if name == 'g': + optimizer_name = 'g_optimizer' + + optimizer = getattr(self, optimizer_name) + optimizer.zero_grad() + loss.backward() + optimizer.step() + + for key in list(loss_dict.keys()): + dict[name + key] = loss_dict.get(key, 0).item() + return dict, total_loss + + def get_loss(self, + pred_poses, + gt_poses, + pre=None + ): + loss_dict = {} + + + rec_loss = torch.mean(torch.abs(pred_poses - gt_poses)) + v_pr = pred_poses[:, 1:] - pred_poses[:, :-1] + v_gt = gt_poses[:, 1:] - gt_poses[:, :-1] + velocity_loss = torch.mean(torch.abs(v_pr - v_gt)) + + if pre is None: + f0_vel = 0 + else: + v0_pr = pred_poses[:, 0] - pre[:, -1] + v0_gt = gt_poses[:, 0] - pre[:, -1] + f0_vel = torch.mean(torch.abs(v0_pr - v0_gt)) + + gen_loss = rec_loss + velocity_loss + f0_vel + + loss_dict['rec_loss'] = rec_loss + loss_dict['velocity_loss'] = velocity_loss + # loss_dict['e_q_loss'] = e_q_loss + if pre is not None: + loss_dict['f0_vel'] = f0_vel + + return gen_loss, loss_dict + + def load_state_dict(self, state_dict): + self.g.load_state_dict(state_dict['g']) + + def extract(self, x): + self.g.eval() + if x.shape[2] > self.full_dim: + if x.shape[2] == 
239: + x = x[:, :, 102:] + x = x[:, :, self.c_index] + feat = self.g.encode(x) + return feat.transpose(1, 2), x diff --git a/nets/init_model.py b/nets/init_model.py new file mode 100644 index 0000000000000000000000000000000000000000..ae665607586f7feb8c3aaf9cf420da3c9c677241 --- /dev/null +++ b/nets/init_model.py @@ -0,0 +1,30 @@ +from nets import * + + +def init_model(model_name, args, config): + + if model_name == 's2g_face': + generator = s2g_face( + args, + config, + ) + elif model_name == 's2g_body_vq': + generator = s2g_body_vq( + args, + config, + ) + elif model_name == 's2g_body_pixel': + generator = s2g_body_pixel( + args, + config, + ) + elif model_name == 's2g_body_ae': + generator = s2g_body_ae( + args, + config, + ) + else: + raise ValueError + return generator + + diff --git a/nets/inpainting/gated_pixelcnn_1d.py b/nets/inpainting/gated_pixelcnn_1d.py new file mode 100644 index 0000000000000000000000000000000000000000..62c6ec375b1589afde7ec78a9985e5a875d69363 --- /dev/null +++ b/nets/inpainting/gated_pixelcnn_1d.py @@ -0,0 +1,138 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F + + +def weights_init(m): + classname = m.__class__.__name__ + if classname.find('Conv') != -1: + try: + nn.init.xavier_uniform_(m.weight.data) + m.bias.data.fill_(0) + except AttributeError: + print("Skipping initialization of ", classname) + + +class GatedActivation(nn.Module): + def __init__(self): + super().__init__() + + def forward(self, x): + x, y = x.chunk(2, dim=1) + return F.tanh(x) * F.sigmoid(y) + + +class GatedMaskedConv1d(nn.Module): + def __init__(self, mask_type, dim, kernel, residual, n_classes=10): + super().__init__() + assert kernel % 2 == 1, print("Kernel size must be odd") + self.mask_type = mask_type + self.residual = residual + + self.class_cond_embedding = nn.Embedding( + n_classes, 2 * dim + ) + + kernel_shp = (kernel // 2 + 1) # (ceil(n/2), n) + padding_shp = (kernel // 2) + self.vert_stack = nn.Conv1d( + dim, dim * 2, + kernel_shp, 1, padding_shp + ) + + self.gate = GatedActivation() + + if self.residual: + self.res = nn.Conv1d(dim, dim, 1) + + def make_causal(self): + self.vert_stack.weight.data[:, :, -1].zero_() # Mask final row + + def forward(self, x, h): + if self.mask_type == 'A': + self.make_causal() + + h = self.class_cond_embedding(h) + h_vert = self.vert_stack(x) + h_vert = h_vert[:, :, :x.size(-2), :] + out = self.gate(h_vert + h[:, :, None, None]) + + if self.residual: + out = self.res(out) + x + + return out + + +class GatedPixelCNN(nn.Module): + def __init__(self, input_dim=256, dim=64, n_layers=15, n_classes=10): + super().__init__() + self.dim = dim + + self.embedding_aud_mo = nn.Conv1d(512, dim, 1, 1, padding=0) + self.fusion = nn.Conv1d(dim * 2, dim, 1, 1, padding=0) + + # Create embedding layer to embed input + self.embedding = nn.Embedding(input_dim, dim) + + # Building the PixelCNN layer by layer + self.layers = nn.ModuleList() + + # Initial block with Mask-A convolution + # Rest with Mask-B convolutions + for i in range(n_layers): + mask_type = 'A' if i == 0 else 'B' + kernel = 7 if i == 0 else 3 + residual = False if i == 0 else True + + self.layers.append( + GatedMaskedConv1d(mask_type, dim, kernel, residual, n_classes) + ) + + # Add the output layer + self.output_conv = nn.Sequential( + nn.Conv1d(dim, 512, 1), + nn.ReLU(True), + nn.Conv1d(512, input_dim, 1) + ) + + self.apply(weights_init) + + self.dp = nn.Dropout(0.1) + + def forward(self, x, label, c): + x = x # (B, C, W) + + for i, layer in enumerate(self.layers): + if i == 
1: + c = self.embedding(c) + x = self.fusion(torch.cat([x, c], dim=1)) + x = layer(x, label) + + return self.output_conv(x) + + def generate(self, label, shape=(8, 8), batch_size=64, aud_feat=None, pre_latents=None, pre_audio=None): + param = next(self.parameters()) + x = torch.zeros( + (batch_size, *shape), + dtype=torch.int64, device=param.device + ) + if pre_latents is not None: + x = torch.cat([pre_latents, x], dim=1) + aud_feat = torch.cat([pre_audio, aud_feat], dim=2) + h0 = pre_latents.shape[1] + h = h0 + shape[0] + else: + h0 = 0 + h = shape[0] + + for i in range(h0, h): + for j in range(shape[1]): + if self.audio: + logits = self.forward(x, label, aud_feat) + else: + logits = self.forward(x, label) + probs = F.softmax(logits[:, :, i, j], -1) + x.data[:, i, j].copy_( + probs.multinomial(1).squeeze().data + ) + return x[:, h0:h] diff --git a/nets/inpainting/inpainting_frame.py b/nets/inpainting/inpainting_frame.py new file mode 100644 index 0000000000000000000000000000000000000000..db13710c27107c5b8c1bf468be89f4f1942c6faf --- /dev/null +++ b/nets/inpainting/inpainting_frame.py @@ -0,0 +1,287 @@ +import os +import sys + +import torch +from torch.optim.lr_scheduler import StepLR + +sys.path.append(os.getcwd()) + +from nets.layers import * +from nets.base import TrainWrapperBaseClass +from nets.spg.gated_pixelcnn_v2 import GatedPixelCNN as pixelcnn +from nets.inpainting.vqvae_1d_sc import VQVAE_SC as s2g_body +from nets.spg.vqvae_1d import AudioEncoder +from nets.utils import parse_audio, denormalize +from data_utils import get_mfcc, get_melspec, get_mfcc_old, get_mfcc_psf, get_mfcc_psf_min, get_mfcc_ta +import numpy as np +import torch.optim as optim +import torch.nn.functional as F +from sklearn.preprocessing import normalize + +from data_utils.lower_body import c_index, c_index_3d, c_index_6d +from data_utils.utils import smooth_geom, get_mfcc_sepa + + +class TrainWrapper(TrainWrapperBaseClass): + ''' + a wrapper receving a batch from data_utils and calculate loss + ''' + + def __init__(self, args, config): + self.args = args + self.config = config + self.device = torch.device(self.args.gpu) + self.global_step = 0 + + self.convert_to_6d = self.config.Data.pose.convert_to_6d + self.expression = self.config.Data.pose.expression + self.epoch = 0 + self.init_params() + self.num_classes = 4 + self.audio = True + self.composition = self.config.Model.composition + self.bh_model = self.config.Model.bh_model + + dim, layer = 512, 5 + self.AudEnc = AudioEncoder(in_dim=64, num_hiddens=256, num_residual_layers=2, num_residual_hiddens=256).to( + self.device) + self.Predictor = pixelcnn(2048, dim, layer, self.num_classes, self.audio, self.bh_model).to(self.device) + self.VQ = s2g_body(self.each_dim[1] + self.each_dim[2], embedding_dim=512, num_embeddings=config.Model.code_num, + num_hiddens=1024, num_residual_layers=2, num_residual_hiddens=512).to(self.device) + + self.discriminator = None + if self.convert_to_6d: + self.c_index = c_index_6d + else: + self.c_index = c_index_3d + + super().__init__(args, config) + + def init_optimizer(self): + + print('using Adam') + self.generator_optimizer = optim.Adam( + self.parameters(), + lr=self.config.Train.learning_rate.generator_learning_rate, + betas=[0.9, 0.999] + ) + + def state_dict(self): + model_state = { + 'AudEnc': self.AudEnc.state_dict(), + 'Predictor': self.Predictor.state_dict(), + 'VQ': self.VQ.state_dict(), + 'generator_optim': self.generator_optimizer.state_dict(), + } + return model_state + + def load_state_dict(self, state_dict): + + 
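+ # Note: the loop below strips the 'module.' prefix that torch.nn.DataParallel adds to parameter names, so checkpoints saved from multi-GPU training load cleanly on a single device before each component's state dict is applied.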
from collections import OrderedDict + new_state_dict = OrderedDict() # create new OrderedDict that does not contain `module.` + for k, v in state_dict.items(): + sub_dict = OrderedDict() + if v is not None: + for k1, v1 in v.items(): + name = k1.replace('module.', '') + sub_dict[name] = v1 + new_state_dict[k] = sub_dict + state_dict = new_state_dict + + if 'AudEnc' in state_dict: + self.AudEnc.load_state_dict(state_dict['AudEnc']) + if 'Predictor' in state_dict: + self.Predictor.load_state_dict(state_dict['Predictor']) + if 'VQ' in state_dict: + self.VQ.load_state_dict(state_dict['VQ']) + + if 'generator_optim' in state_dict: + self.generator_optimizer.load_state_dict(state_dict['generator_optim']) + + def init_params(self): + if self.config.Data.pose.convert_to_6d: + scale = 2 + else: + scale = 1 + + global_orient = round(0 * scale) + leye_pose = reye_pose = round(0 * scale) + jaw_pose = round(0 * scale) + body_pose = round((63 - 24) * scale) + left_hand_pose = right_hand_pose = round(45 * scale) + if self.expression: + expression = 100 + else: + expression = 0 + + b_j = 0 + jaw_dim = jaw_pose + b_e = b_j + jaw_dim + eye_dim = leye_pose + reye_pose + b_b = b_e + eye_dim + body_dim = global_orient + body_pose + b_h = b_b + body_dim + hand_dim = left_hand_pose + right_hand_pose + b_f = b_h + hand_dim + face_dim = expression + + self.dim_list = [b_j, b_e, b_b, b_h, b_f] + self.full_dim = jaw_dim + eye_dim + body_dim + hand_dim + self.pose = int(self.full_dim / round(3 * scale)) + self.each_dim = [jaw_dim, eye_dim + body_dim, hand_dim, face_dim] + + def __call__(self, bat): + # assert (not self.args.infer), "infer mode" + self.global_step += 1 + + total_loss = None + loss_dict = {} + + aud, poses = bat['aud_feat'].to(self.device).to(torch.float32), bat['poses'].to(self.device).to(torch.float32) + + id = bat['speaker'].to(self.device) - 20 + # id = F.one_hot(id, self.num_classes) + + poses = poses[:, self.c_index, :] + + aud = aud.permute(0, 2, 1) + gt_poses = poses.permute(0, 2, 1) + mask = 1 + + input_poses = gt_poses * mask + + z, enc_feats = self.VQ.encode(gt_poses=input_poses) + audio = self.AudEnc(aud[:, :].transpose(1, 2)).unsqueeze(dim=-1) + z = self.Predictor(z, id, audio) + _, e_q_loss, pred_poses = self.VQ.decode(z, enc_feats) + + self.generator_optimizer.zero_grad() + loss, loss_dict = self.get_loss(pred_poses, gt_poses, e_q_loss) + loss.backward() + # clip after backward so the norm is computed on the freshly populated gradients + grad = torch.nn.utils.clip_grad_norm_(self.parameters(), self.config.Train.max_gradient_norm) + + if torch.isnan(grad).sum() > 0: + print('warning: NaN gradient norm encountered') + + loss_dict['grad'] = grad.item() + loss_dict['ce_loss'] = loss.item() + self.generator_optimizer.step() + + return total_loss, loss_dict + + def get_loss(self, + pred_poses, + gt_poses, + e_q_loss, + ): + loss_dict = {} + + rec_loss = torch.mean(torch.abs(pred_poses - gt_poses)) + v_pr = pred_poses[:, 1:] - pred_poses[:, :-1] + v_gt = gt_poses[:, 1:] - gt_poses[:, :-1] + velocity_loss = torch.mean(torch.abs(v_pr - v_gt)) + + gen_loss = rec_loss + e_q_loss + velocity_loss + + loss_dict['rec_loss'] = rec_loss + loss_dict['velocity_loss'] = velocity_loss + loss_dict['e_q_loss'] = e_q_loss + + return gen_loss, loss_dict + + def infer_on_audio(self, aud_fn, initial_pose=None, norm_stats=None, exp=None, var=None, w_pre=False, rand=None, + continuity=False, id=None, fps=15, sr=22000, B=1, am=None, am_sr=None, frame=0, **kwargs): + ''' + initial_pose: (B, C, T), normalized + (aud_fn, txgfile) -> generated motion (B, T, C) + ''' + output = [] + + assert self.args.infer, "train mode" +
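+ # Note: this inference path references self.generator, self.g_body, self.g_hand and self.audioencoder, which are not built in this wrapper's __init__ (it creates self.AudEnc, self.Predictor and self.VQ); it appears to be carried over from smplx_body_pixel.TrainWrapper and would need those attributes to be aliased before it can run.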
self.generator.eval() + self.g_body.eval() + self.g_hand.eval() + + if continuity: + aud_feat, gap = get_mfcc_sepa(aud_fn, sr=sr, fps=fps) + else: + aud_feat = get_mfcc_ta(aud_fn, sr=sr, fps=fps, smlpx=True, type='mfcc', am=am) + aud_feat = aud_feat.transpose(1, 0) + aud_feat = aud_feat[np.newaxis, ...].repeat(B, axis=0) + aud_feat = torch.tensor(aud_feat, dtype=torch.float32).to(self.device) + + if id is None: + id = torch.tensor([0]).to(self.device) + else: + id = id.repeat(B) + + with torch.no_grad(): + aud_feat = aud_feat.permute(0, 2, 1) + if continuity: + self.audioencoder.eval() + pre_pose = {} + pre_pose['b'] = pre_pose['h'] = None + pre_latents, pre_audio, body_0, hand_0 = self.infer(aud_feat[:, :gap], frame, id, B, pre_pose=pre_pose) + pre_pose['b'] = body_0[:, :, -4:].transpose(1, 2) + pre_pose['h'] = hand_0[:, :, -4:].transpose(1, 2) + _, _, body_1, hand_1 = self.infer(aud_feat[:, gap:], frame, id, B, pre_latents, pre_audio, pre_pose) + body = torch.cat([body_0, body_1], dim=2) + hand = torch.cat([hand_0, hand_1], dim=2) + + else: + if self.audio: + self.audioencoder.eval() + audio = self.audioencoder(aud_feat.transpose(1, 2), frame_num=frame).unsqueeze(dim=-1).repeat(1, 1, + 1, 2) + latents = self.generator.generate(id, shape=[audio.shape[2], 2], batch_size=B, aud_feat=audio) + else: + latents = self.generator.generate(id, shape=[aud_feat.shape[1] // 4, 2], batch_size=B) + + body_latents = latents[..., 0] + hand_latents = latents[..., 1] + + body, _ = self.g_body.decode(b=body_latents.shape[0], w=body_latents.shape[1], latents=body_latents) + hand, _ = self.g_hand.decode(b=hand_latents.shape[0], w=hand_latents.shape[1], latents=hand_latents) + + pred_poses = torch.cat([body, hand], dim=1).transpose(1, 2).cpu().numpy() + + output = pred_poses + + return output + + def infer(self, aud_feat, frame, id, B, pre_latents=None, pre_audio=None, pre_pose=None): + audio = self.audioencoder(aud_feat.transpose(1, 2), frame_num=frame).unsqueeze(dim=-1).repeat(1, 1, 1, 2) + latents = self.generator.generate(id, shape=[audio.shape[2], 2], batch_size=B, aud_feat=audio, + pre_latents=pre_latents, pre_audio=pre_audio) + + body_latents = latents[..., 0] + hand_latents = latents[..., 1] + + body, _ = self.g_body.decode(b=body_latents.shape[0], w=body_latents.shape[1], + latents=body_latents, pre_state=pre_pose['b']) + hand, _ = self.g_hand.decode(b=hand_latents.shape[0], w=hand_latents.shape[1], + latents=hand_latents, pre_state=pre_pose['h']) + + return latents, audio, body, hand + + def generate(self, aud, id, frame_num=0): + + self.AudEnc.eval() + self.Predictor.eval() + self.VQ.eval() + aud_feat = aud.permute(0, 2, 1) + + + audio = self.audioencoder(aud_feat.transpose(1, 2), frame_num=frame_num).unsqueeze(dim=-1).repeat(1, 1, 1, + 2) + latents = self.generator.generate(id, shape=[audio.shape[2], 2], batch_size=aud.shape[0], aud_feat=audio) + + + body_latents = latents[..., 0] + hand_latents = latents[..., 1] + + body = self.g_body.decode(b=body_latents.shape[0], w=body_latents.shape[1], latents=body_latents) + hand = self.g_hand.decode(b=hand_latents.shape[0], w=hand_latents.shape[1], latents=hand_latents) + + pred_poses = torch.cat([body, hand], dim=1).transpose(1, 2) + return pred_poses diff --git a/nets/inpainting/vqvae_1d_sc.py b/nets/inpainting/vqvae_1d_sc.py new file mode 100644 index 0000000000000000000000000000000000000000..d2b774eaad3a8d468fdf4cf7982507602a6601e0 --- /dev/null +++ b/nets/inpainting/vqvae_1d_sc.py @@ -0,0 +1,99 @@ +import os +import numpy as np +import torch +import 
torch.nn as nn +import torch.nn.functional as F +from nets.spg.vqvae_modules import VectorQuantizerEMA, ConvNormRelu, Res_CNR_Stack +from nets.spg.vqvae_1d import AudioEncoder + + +class EncoderSC(nn.Module): + def __init__(self, in_dim, embedding_dim, num_hiddens, num_residual_layers, num_residual_hiddens): + super(EncoderSC, self).__init__() + self._num_hiddens = num_hiddens + self._num_residual_layers = num_residual_layers + self._num_residual_hiddens = num_residual_hiddens + + self.project = ConvNormRelu(in_dim, self._num_hiddens // 4, leaky=True) + + self._enc_1 = Res_CNR_Stack(self._num_hiddens // 4, self._num_residual_layers, leaky=True) + self._down_1 = ConvNormRelu(self._num_hiddens // 4, self._num_hiddens // 2, leaky=True, residual=True, + sample='down') + self._enc_2 = Res_CNR_Stack(self._num_hiddens // 2, self._num_residual_layers, leaky=True) + self._down_2 = ConvNormRelu(self._num_hiddens // 2, self._num_hiddens, leaky=True, residual=True, sample='down') + self._enc_3 = Res_CNR_Stack(self._num_hiddens, self._num_residual_layers, leaky=True) + + self.pre_vq_conv = nn.Conv1d(self._num_hiddens, embedding_dim, 1, 1) + + def forward(self, x): + out = {} # skip-connection features keyed 1..3; a plain list here would raise IndexError on the indexed assignments below + h = self.project(x) + h = self._enc_1(h) + out[1] = h + h = self._down_1(h) + h = self._enc_2(h) + out[2] = h + h = self._down_2(h) + h = self._enc_3(h) + out[3] = h + h = self.pre_vq_conv(h) + return h, out + + +class DecoderSC(nn.Module): + def __init__(self, out_dim, embedding_dim, num_hiddens, num_residual_layers, num_residual_hiddens, ae=False): + super(DecoderSC, self).__init__() + self._num_hiddens = num_hiddens + self._num_residual_layers = num_residual_layers + self._num_residual_hiddens = num_residual_hiddens + + self.aft_vq_conv = nn.Conv1d(embedding_dim, self._num_hiddens, 1, 1) + + self._dec_1 = Res_CNR_Stack(self._num_hiddens, self._num_residual_layers, leaky=True) + self._up_2 = ConvNormRelu(self._num_hiddens, self._num_hiddens // 2, leaky=True, residual=True, sample='up') + self._dec_2 = Res_CNR_Stack(self._num_hiddens // 2, self._num_residual_layers, leaky=True) + self._up_3 = ConvNormRelu(self._num_hiddens // 2, self._num_hiddens // 4, leaky=True, residual=True, + sample='up') + self._dec_3 = Res_CNR_Stack(self._num_hiddens // 4, self._num_residual_layers, leaky=True) + + self.project = nn.Conv1d(self._num_hiddens // 4, out_dim, 1, 1) + + def forward(self, h, out): + + h = self.aft_vq_conv(h) + h = h + out[3] + h = self._dec_1(h) + h = self._up_2(h) + h = h + out[2] + h = self._dec_2(h) + h = self._up_3(h) + h = h + out[1] + h = self._dec_3(h) + + recon = self.project(h) + return recon + + +class VQVAE_SC(nn.Module): + """VQ-VAE""" + + def __init__(self, in_dim, embedding_dim, num_embeddings, + num_hiddens, num_residual_layers, num_residual_hiddens, + commitment_cost=0.25, decay=0.99, share=False): + super().__init__() + self.in_dim = in_dim + self.embedding_dim = embedding_dim + self.num_embeddings = num_embeddings + + self.encoder = EncoderSC(in_dim, embedding_dim, num_hiddens, num_residual_layers, num_residual_hiddens) + self.vq_layer = VectorQuantizerEMA(embedding_dim, num_embeddings, commitment_cost, decay) + self.decoder = DecoderSC(in_dim, embedding_dim, num_hiddens, num_residual_layers, num_residual_hiddens) + + def encode(self, gt_poses): + z, enc_feats = self.encoder(gt_poses.transpose(1, 2)) + return z, enc_feats + + def decode(self, z, enc_feats): + e, e_q_loss = self.vq_layer(z) + x = self.decoder(e, enc_feats) + return e, e_q_loss, x.transpose(1, 2) diff --git a/nets/layers.py b/nets/layers.py new
file mode 100644 index 0000000000000000000000000000000000000000..79251b42b6e0fe839ec04dc38472ef36165208ac --- /dev/null +++ b/nets/layers.py @@ -0,0 +1,1052 @@ +import os +import sys + +sys.path.append(os.getcwd()) + +import torch +import torch.nn as nn +import numpy as np + + +# TODO: be aware of the actual netork structures + +def get_log(x): + log = 0 + while x > 1: + if x % 2 == 0: + x = x // 2 + log += 1 + else: + raise ValueError('x is not a power of 2') + + return log + + +class ConvNormRelu(nn.Module): + ''' + (B,C_in,H,W) -> (B, C_out, H, W) + there exist some kernel size that makes the result is not H/s + #TODO: there might some problems with residual + ''' + + def __init__(self, + in_channels, + out_channels, + type='1d', + leaky=False, + downsample=False, + kernel_size=None, + stride=None, + padding=None, + p=0, + groups=1, + residual=False, + norm='bn'): + ''' + conv-bn-relu + ''' + super(ConvNormRelu, self).__init__() + self.residual = residual + self.norm_type = norm + # kernel_size = k + # stride = s + + if kernel_size is None and stride is None: + if not downsample: + kernel_size = 3 + stride = 1 + else: + kernel_size = 4 + stride = 2 + + if padding is None: + if isinstance(kernel_size, int) and isinstance(stride, tuple): + padding = tuple(int((kernel_size - st) / 2) for st in stride) + elif isinstance(kernel_size, tuple) and isinstance(stride, int): + padding = tuple(int((ks - stride) / 2) for ks in kernel_size) + elif isinstance(kernel_size, tuple) and isinstance(stride, tuple): + padding = tuple(int((ks - st) / 2) for ks, st in zip(kernel_size, stride)) + else: + padding = int((kernel_size - stride) / 2) + + if self.residual: + if downsample: + if type == '1d': + self.residual_layer = nn.Sequential( + nn.Conv1d( + in_channels=in_channels, + out_channels=out_channels, + kernel_size=kernel_size, + stride=stride, + padding=padding + ) + ) + elif type == '2d': + self.residual_layer = nn.Sequential( + nn.Conv2d( + in_channels=in_channels, + out_channels=out_channels, + kernel_size=kernel_size, + stride=stride, + padding=padding + ) + ) + else: + if in_channels == out_channels: + self.residual_layer = nn.Identity() + else: + if type == '1d': + self.residual_layer = nn.Sequential( + nn.Conv1d( + in_channels=in_channels, + out_channels=out_channels, + kernel_size=kernel_size, + stride=stride, + padding=padding + ) + ) + elif type == '2d': + self.residual_layer = nn.Sequential( + nn.Conv2d( + in_channels=in_channels, + out_channels=out_channels, + kernel_size=kernel_size, + stride=stride, + padding=padding + ) + ) + + in_channels = in_channels * groups + out_channels = out_channels * groups + if type == '1d': + self.conv = nn.Conv1d(in_channels=in_channels, out_channels=out_channels, + kernel_size=kernel_size, stride=stride, padding=padding, + groups=groups) + self.norm = nn.BatchNorm1d(out_channels) + self.dropout = nn.Dropout(p=p) + elif type == '2d': + self.conv = nn.Conv2d(in_channels=in_channels, out_channels=out_channels, + kernel_size=kernel_size, stride=stride, padding=padding, + groups=groups) + self.norm = nn.BatchNorm2d(out_channels) + self.dropout = nn.Dropout2d(p=p) + if norm == 'gn': + self.norm = nn.GroupNorm(2, out_channels) + elif norm == 'ln': + self.norm = nn.LayerNorm(out_channels) + if leaky: + self.relu = nn.LeakyReLU(negative_slope=0.2) + else: + self.relu = nn.ReLU() + + def forward(self, x, **kwargs): + if self.norm_type == 'ln': + out = self.dropout(self.conv(x)) + out = self.norm(out.transpose(1,2)).transpose(1,2) + else: + out = 
self.norm(self.dropout(self.conv(x))) + if self.residual: + residual = self.residual_layer(x) + out += residual + return self.relu(out) + + +class UNet1D(nn.Module): + def __init__(self, + input_channels, + output_channels, + max_depth=5, + kernel_size=None, + stride=None, + p=0, + groups=1): + super(UNet1D, self).__init__() + self.pre_downsampling_conv = nn.ModuleList([]) + self.conv1 = nn.ModuleList([]) + self.conv2 = nn.ModuleList([]) + self.upconv = nn.Upsample(scale_factor=2, mode='nearest') + self.max_depth = max_depth + self.groups = groups + + self.pre_downsampling_conv.append(ConvNormRelu(input_channels, output_channels, + type='1d', leaky=True, downsample=False, + kernel_size=kernel_size, stride=stride, p=p, groups=groups)) + self.pre_downsampling_conv.append(ConvNormRelu(output_channels, output_channels, + type='1d', leaky=True, downsample=False, + kernel_size=kernel_size, stride=stride, p=p, groups=groups)) + + for i in range(self.max_depth): + self.conv1.append(ConvNormRelu(output_channels, output_channels, + type='1d', leaky=True, downsample=True, + kernel_size=kernel_size, stride=stride, p=p, groups=groups)) + + for i in range(self.max_depth): + self.conv2.append(ConvNormRelu(output_channels, output_channels, + type='1d', leaky=True, downsample=False, + kernel_size=kernel_size, stride=stride, p=p, groups=groups)) + + def forward(self, x): + + input_size = x.shape[-1] + + assert get_log( + input_size) >= self.max_depth, 'num_frames must be a power of 2 and its power must be greater than max_depth' + + x = nn.Sequential(*self.pre_downsampling_conv)(x) + + residuals = [] + residuals.append(x) + for i, conv1 in enumerate(self.conv1): + x = conv1(x) + if i < self.max_depth - 1: + residuals.append(x) + + for i, conv2 in enumerate(self.conv2): + x = self.upconv(x) + residuals[self.max_depth - i - 1] + x = conv2(x) + + return x + + +class UNet2D(nn.Module): + def __init__(self): + super(UNet2D, self).__init__() + raise NotImplementedError('2D Unet is wierd') + + +class AudioPoseEncoder1D(nn.Module): + ''' + (B, C, T) -> (B, C*2, T) -> ... -> (B, C_out, T) + ''' + + def __init__(self, + C_in, + C_out, + kernel_size=None, + stride=None, + min_layer_nums=None + ): + super(AudioPoseEncoder1D, self).__init__() + self.C_in = C_in + self.C_out = C_out + + conv_layers = nn.ModuleList([]) + cur_C = C_in + num_layers = 0 + while cur_C < self.C_out: + conv_layers.append(ConvNormRelu( + in_channels=cur_C, + out_channels=cur_C * 2, + kernel_size=kernel_size, + stride=stride + )) + cur_C *= 2 + num_layers += 1 + + if (cur_C != C_out) or (min_layer_nums is not None and num_layers < min_layer_nums): + while (cur_C != C_out) or num_layers < min_layer_nums: + conv_layers.append(ConvNormRelu( + in_channels=cur_C, + out_channels=C_out, + kernel_size=kernel_size, + stride=stride + )) + num_layers += 1 + cur_C = C_out + + self.conv_layers = nn.Sequential(*conv_layers) + + def forward(self, x): + ''' + x: (B, C, T) + ''' + x = self.conv_layers(x) + return x + + +class AudioPoseEncoder2D(nn.Module): + ''' + (B, C, T) -> (B, 1, C, T) -> ... 
-> (B, C_out, T) + ''' + + def __init__(self): + raise NotImplementedError + + +class AudioPoseEncoderRNN(nn.Module): + ''' + (B, C, T)->(B, T, C)->(B, T, C_out)->(B, C_out, T) + ''' + + def __init__(self, + C_in, + hidden_size, + num_layers, + rnn_cell='gru', + bidirectional=False + ): + super(AudioPoseEncoderRNN, self).__init__() + if rnn_cell == 'gru': + self.cell = nn.GRU(input_size=C_in, hidden_size=hidden_size, num_layers=num_layers, batch_first=True, + bidirectional=bidirectional) + elif rnn_cell == 'lstm': + self.cell = nn.LSTM(input_size=C_in, hidden_size=hidden_size, num_layers=num_layers, batch_first=True, + bidirectional=bidirectional) + else: + raise ValueError('invalid rnn cell:%s' % (rnn_cell)) + + def forward(self, x, state=None): + + x = x.permute(0, 2, 1) + x, state = self.cell(x, state) + x = x.permute(0, 2, 1) + + return x + + +class AudioPoseEncoderGraph(nn.Module): + ''' + (B, C, T)->(B, 2, V, T)->(B, 2, T, V)->(B, D, T, V) + ''' + + def __init__(self, + layers_config, # expected to be a list of (C_in, C_out, kernel_size) tuples + A, # adjacency matrix (num_parts, V, V) + residual, + local_bn=False, + share_weights=False + ) -> None: + super().__init__() + self.A = A + self.num_joints = A.shape[1] + self.num_parts = A.shape[0] + self.C_in = layers_config[0][0] + self.C_out = layers_config[-1][1] + + self.conv_layers = nn.ModuleList([ + GraphConvNormRelu( + C_in=c_in, + C_out=c_out, + A=self.A, + residual=residual, + local_bn=local_bn, + kernel_size=k, + share_weights=share_weights + ) for (c_in, c_out, k) in layers_config + ]) + + self.conv_layers = nn.Sequential(*self.conv_layers) + + def forward(self, x): + ''' + x: (B, C, T), C should be num_joints*D + output: (B, D, T, V) + ''' + B, C, T = x.shape + x = x.view(B, self.num_joints, self.C_in, T) # (B, V, D, T), D is the per-joint feature dim; note that V comes before D here + x = x.permute(0, 2, 3, 1) # (B, D, T, V) + assert x.shape[1] == self.C_in + + x_conved = self.conv_layers(x) + + # x_conved = x_conved.permute(0, 3, 1, 2).contiguous().view(B, self.C_out*self.num_joints, T)#(B, V*C_out, T) + + return x_conved + + +class SeqEncoder2D(nn.Module): + ''' + seq_encoder, encoding a seq to a vector + (B, C, T)->(B, 2, V, T)->(B, 2, T, V) -> (B, 32, )->...->(B, C_out) + ''' + + def __init__(self, + C_in, # should be 2 + T_in, + C_out, + num_joints, + min_layer_num=None, + residual=False + ): + super(SeqEncoder2D, self).__init__() + self.C_in = C_in + self.C_out = C_out + self.T_in = T_in + self.num_joints = num_joints + + conv_layers = nn.ModuleList([]) + conv_layers.append(ConvNormRelu( + in_channels=C_in, + out_channels=32, + type='2d', + residual=residual + )) + + cur_C = 32 + cur_H = T_in + cur_W = num_joints + num_layers = 1 + while (cur_C < C_out) or (cur_H > 1) or (cur_W > 1): + ks = [3, 3] + st = [1, 1] + + if cur_H > 1: + if cur_H > 4: + ks[0] = 4 + st[0] = 2 + else: + ks[0] = cur_H + st[0] = cur_H + if cur_W > 1: + if cur_W > 4: + ks[1] = 4 + st[1] = 2 + else: + ks[1] = cur_W + st[1] = cur_W + + conv_layers.append(ConvNormRelu( + in_channels=cur_C, + out_channels=min(C_out, cur_C * 2), + type='2d', + kernel_size=tuple(ks), + stride=tuple(st), + residual=residual + )) + cur_C = min(cur_C * 2, C_out) + if cur_H > 1: + if cur_H > 4: + cur_H //= 2 + else: + cur_H = 1 + if cur_W > 1: + if cur_W > 4: + cur_W //= 2 + else: + cur_W = 1 + num_layers += 1 + + if min_layer_num is not None and (num_layers < min_layer_num): + while num_layers < min_layer_num: + conv_layers.append(ConvNormRelu( + in_channels=C_out, + out_channels=C_out, + type='2d', + kernel_size=1, + stride=1, +
residual=residual + )) + num_layers += 1 + + self.conv_layers = nn.Sequential(*conv_layers) + self.num_layers = num_layers + + def forward(self, x): + B, C, T = x.shape + x = x.view(B, self.num_joints, self.C_in, T) # (B, V, D, T) V in front + x = x.permute(0, 2, 3, 1) # (B, D, T, V) + assert x.shape[1] == self.C_in and x.shape[-1] == self.num_joints + + x = self.conv_layers(x) + return x.squeeze() + + +class SeqEncoder1D(nn.Module): + ''' + (B, C, T)->(B, D) + ''' + + def __init__(self, + C_in, + C_out, + T_in, + min_layer_nums=None + ): + super(SeqEncoder1D, self).__init__() + conv_layers = nn.ModuleList([]) + cur_C = C_in + cur_T = T_in + self.num_layers = 0 + while (cur_C < C_out) or (cur_T > 1): + ks = 3 + st = 1 + if cur_T > 1: + if cur_T > 4: + ks = 4 + st = 2 + else: + ks = cur_T + st = cur_T + + conv_layers.append(ConvNormRelu( + in_channels=cur_C, + out_channels=min(C_out, cur_C * 2), + type='1d', + kernel_size=ks, + stride=st + )) + cur_C = min(cur_C * 2, C_out) + if cur_T > 1: + if cur_T > 4: + cur_T = cur_T // 2 + else: + cur_T = 1 + self.num_layers += 1 + + if min_layer_nums is not None and (self.num_layers < min_layer_nums): + while self.num_layers < min_layer_nums: + conv_layers.append(ConvNormRelu( + in_channels=C_out, + out_channels=C_out, + type='1d', + kernel_size=1, + stride=1 + )) + self.num_layers += 1 + self.conv_layers = nn.Sequential(*conv_layers) + + def forward(self, x): + x = self.conv_layers(x) + return x.squeeze() + + +class SeqEncoderRNN(nn.Module): + ''' + (B, C, T) -> (B, T, C) -> (B, D) + LSTM/GRU-FC + ''' + + def __init__(self, + hidden_size, + in_size, + num_rnn_layers, + rnn_cell='gru', + bidirectional=False + ): + super(SeqEncoderRNN, self).__init__() + self.hidden_size = hidden_size + self.in_size = in_size + self.num_rnn_layers = num_rnn_layers + self.bidirectional = bidirectional + + if rnn_cell == 'gru': + self.cell = nn.GRU(input_size=self.in_size, hidden_size=self.hidden_size, num_layers=self.num_rnn_layers, + batch_first=True, bidirectional=bidirectional) + elif rnn_cell == 'lstm': + self.cell = nn.LSTM(input_size=self.in_size, hidden_size=self.hidden_size, num_layers=self.num_rnn_layers, + batch_first=True, bidirectional=bidirectional) + + def forward(self, x, state=None): + + x = x.permute(0, 2, 1) + B, T, C = x.shape + x, _ = self.cell(x, state) + if self.bidirectional: + out = torch.cat([x[:, -1, :self.hidden_size], x[:, 0, self.hidden_size:]], dim=-1) + else: + out = x[:, -1, :] + assert out.shape[0] == B + return out + + +class SeqEncoderGraph(nn.Module): + ''' + ''' + + def __init__(self, + embedding_size, + layer_configs, + residual, + local_bn, + A, + T, + share_weights=False + ) -> None: + super().__init__() + + self.C_in = layer_configs[0][0] + self.C_out = embedding_size + + self.num_joints = A.shape[1] + + self.graph_encoder = AudioPoseEncoderGraph( + layers_config=layer_configs, + A=A, + residual=residual, + local_bn=local_bn, + share_weights=share_weights + ) + + cur_C = layer_configs[-1][1] + self.spatial_pool = ConvNormRelu( + in_channels=cur_C, + out_channels=cur_C, + type='2d', + kernel_size=(1, self.num_joints), + stride=(1, 1), + padding=(0, 0) + ) + + temporal_pool = nn.ModuleList([]) + cur_H = T + num_layers = 0 + self.temporal_conv_info = [] + while cur_C < self.C_out or cur_H > 1: + self.temporal_conv_info.append(cur_C) + ks = [3, 1] + st = [1, 1] + + if cur_H > 1: + if cur_H > 4: + ks[0] = 4 + st[0] = 2 + else: + ks[0] = cur_H + st[0] = cur_H + + temporal_pool.append(ConvNormRelu( + in_channels=cur_C, + 
out_channels=min(self.C_out, cur_C * 2), + type='2d', + kernel_size=tuple(ks), + stride=tuple(st) + )) + cur_C = min(cur_C * 2, self.C_out) + + if cur_H > 1: + if cur_H > 4: + cur_H //= 2 + else: + cur_H = 1 + + num_layers += 1 + + self.temporal_pool = nn.Sequential(*temporal_pool) + print("graph seq encoder info: temporal pool:", self.temporal_conv_info) + self.num_layers = num_layers + # need fc? + + def forward(self, x): + ''' + x: (B, C, T) + ''' + B, C, T = x.shape + x = self.graph_encoder(x) + x = self.spatial_pool(x) + x = self.temporal_pool(x) + x = x.view(B, self.C_out) + + return x + + +class SeqDecoder2D(nn.Module): + ''' + (B, D)->(B, D, 1, 1)->(B, C_out, C, T)->(B, C_out, T) + ''' + + def __init__(self): + super(SeqDecoder2D, self).__init__() + raise NotImplementedError + + +class SeqDecoder1D(nn.Module): + ''' + (B, D)->(B, D, 1)->...->(B, C_out, T) + ''' + + def __init__(self, + D_in, + C_out, + T_out, + min_layer_num=None + ): + super(SeqDecoder1D, self).__init__() + self.T_out = T_out + self.min_layer_num = min_layer_num + + cur_t = 1 + + self.pre_conv = ConvNormRelu( + in_channels=D_in, + out_channels=C_out, + type='1d' + ) + self.num_layers = 1 + self.upconv = nn.Upsample(scale_factor=2, mode='nearest') + self.conv_layers = nn.ModuleList([]) + cur_t *= 2 + while cur_t <= T_out: + self.conv_layers.append(ConvNormRelu( + in_channels=C_out, + out_channels=C_out, + type='1d' + )) + cur_t *= 2 + self.num_layers += 1 + + post_conv = nn.ModuleList([ConvNormRelu( + in_channels=C_out, + out_channels=C_out, + type='1d' + )]) + self.num_layers += 1 + if min_layer_num is not None and self.num_layers < min_layer_num: + while self.num_layers < min_layer_num: + post_conv.append(ConvNormRelu( + in_channels=C_out, + out_channels=C_out, + type='1d' + )) + self.num_layers += 1 + self.post_conv = nn.Sequential(*post_conv) + + def forward(self, x): + + x = x.unsqueeze(-1) + x = self.pre_conv(x) + for conv in self.conv_layers: + x = self.upconv(x) + x = conv(x) + + x = torch.nn.functional.interpolate(x, size=self.T_out, mode='nearest') + x = self.post_conv(x) + return x + + +class SeqDecoderRNN(nn.Module): + ''' + (B, D)->(B, C_out, T) + ''' + + def __init__(self, + hidden_size, + C_out, + T_out, + num_layers, + rnn_cell='gru' + ): + super(SeqDecoderRNN, self).__init__() + self.num_steps = T_out + if rnn_cell == 'gru': + self.cell = nn.GRU(input_size=C_out, hidden_size=hidden_size, num_layers=num_layers, batch_first=True, + bidirectional=False) + elif rnn_cell == 'lstm': + self.cell = nn.LSTM(input_size=C_out, hidden_size=hidden_size, num_layers=num_layers, batch_first=True, + bidirectional=False) + else: + raise ValueError('invalid rnn cell:%s' % (rnn_cell)) + + self.fc = nn.Linear(hidden_size, C_out) + + def forward(self, hidden, frame_0): + frame_0 = frame_0.permute(0, 2, 1) + dec_input = frame_0 + outputs = [] + for i in range(self.num_steps): + frame_out, hidden = self.cell(dec_input, hidden) + frame_out = self.fc(frame_out) + dec_input = frame_out + outputs.append(frame_out) + output = torch.cat(outputs, dim=1) + return output.permute(0, 2, 1) + + +class SeqTranslator2D(nn.Module): + ''' + (B, C, T)->(B, 1, C, T)-> ... 
-> (B, 1, C_out, T_out) + ''' + + def __init__(self, + C_in=64, + C_out=108, + T_in=75, + T_out=25, + residual=True + ): + super(SeqTranslator2D, self).__init__() + print("Warning: hard coded") + self.C_in = C_in + self.C_out = C_out + self.T_in = T_in + self.T_out = T_out + self.residual = residual + + self.conv_layers = nn.Sequential( + ConvNormRelu(1, 32, '2d', kernel_size=5, stride=1), + ConvNormRelu(32, 32, '2d', kernel_size=5, stride=1, residual=self.residual), + ConvNormRelu(32, 32, '2d', kernel_size=5, stride=1, residual=self.residual), + + ConvNormRelu(32, 64, '2d', kernel_size=5, stride=(4, 3)), + ConvNormRelu(64, 64, '2d', kernel_size=5, stride=1, residual=self.residual), + ConvNormRelu(64, 64, '2d', kernel_size=5, stride=1, residual=self.residual), + + ConvNormRelu(64, 128, '2d', kernel_size=5, stride=(4, 1)), + ConvNormRelu(128, 108, '2d', kernel_size=3, stride=(4, 1)), + ConvNormRelu(108, 108, '2d', kernel_size=(1, 3), stride=1, residual=self.residual), + + ConvNormRelu(108, 108, '2d', kernel_size=(1, 3), stride=1, residual=self.residual), + ConvNormRelu(108, 108, '2d', kernel_size=(1, 3), stride=1), + ) + + def forward(self, x): + assert len(x.shape) == 3 and x.shape[1] == self.C_in and x.shape[2] == self.T_in + x = x.view(x.shape[0], 1, x.shape[1], x.shape[2]) + x = self.conv_layers(x) + x = x.squeeze(2) + return x + + +class SeqTranslator1D(nn.Module): + ''' + (B, C, T)->(B, C_out, T) + ''' + + def __init__(self, + C_in, + C_out, + kernel_size=None, + stride=None, + min_layers_num=None, + residual=True, + norm='bn' + ): + super(SeqTranslator1D, self).__init__() + + conv_layers = nn.ModuleList([]) + conv_layers.append(ConvNormRelu( + in_channels=C_in, + out_channels=C_out, + type='1d', + kernel_size=kernel_size, + stride=stride, + residual=residual, + norm=norm + )) + self.num_layers = 1 + if min_layers_num is not None and self.num_layers < min_layers_num: + while self.num_layers < min_layers_num: + conv_layers.append(ConvNormRelu( + in_channels=C_out, + out_channels=C_out, + type='1d', + kernel_size=kernel_size, + stride=stride, + residual=residual, + norm=norm + )) + self.num_layers += 1 + self.conv_layers = nn.Sequential(*conv_layers) + + def forward(self, x): + return self.conv_layers(x) + + +class SeqTranslatorRNN(nn.Module): + ''' + (B, C, T)->(B, C_out, T) + LSTM-FC + ''' + + def __init__(self, + C_in, + C_out, + hidden_size, + num_layers, + rnn_cell='gru' + ): + super(SeqTranslatorRNN, self).__init__() + + if rnn_cell == 'gru': + self.enc_cell = nn.GRU(input_size=C_in, hidden_size=hidden_size, num_layers=num_layers, batch_first=True, + bidirectional=False) + self.dec_cell = nn.GRU(input_size=C_out, hidden_size=hidden_size, num_layers=num_layers, batch_first=True, + bidirectional=False) + elif rnn_cell == 'lstm': + self.enc_cell = nn.LSTM(input_size=C_in, hidden_size=hidden_size, num_layers=num_layers, batch_first=True, + bidirectional=False) + self.dec_cell = nn.LSTM(input_size=C_out, hidden_size=hidden_size, num_layers=num_layers, batch_first=True, + bidirectional=False) + else: + raise ValueError('invalid rnn cell:%s' % (rnn_cell)) + + self.fc = nn.Linear(hidden_size, C_out) + + def forward(self, x, frame_0): + + num_steps = x.shape[-1] + x = x.permute(0, 2, 1) + frame_0 = frame_0.permute(0, 2, 1) + _, hidden = self.enc_cell(x, None) + + outputs = [] + for i in range(num_steps): + inputs = frame_0 + output_frame, hidden = self.dec_cell(inputs, hidden) + output_frame = self.fc(output_frame) + frame_0 = output_frame + outputs.append(output_frame) + outputs = 
torch.cat(outputs, dim=1) + return outputs.permute(0, 2, 1) + + +class ResBlock(nn.Module): + def __init__(self, + input_dim, + fc_dim, + afn, + nfn + ): + ''' + afn: activation fn + nfn: normalization fn + ''' + super(ResBlock, self).__init__() + + self.input_dim = input_dim + self.fc_dim = fc_dim + self.afn = afn + self.nfn = nfn + + if self.afn != 'relu': + raise ValueError('Wrong') + + if self.nfn == 'layer_norm': + raise ValueError('wrong') + + self.layers = nn.Sequential( + nn.Linear(self.input_dim, self.fc_dim // 2), + nn.ReLU(), + nn.Linear(self.fc_dim // 2, self.fc_dim // 2), + nn.ReLU(), + nn.Linear(self.fc_dim // 2, self.fc_dim), + nn.ReLU() + ) + + self.shortcut_layer = nn.Sequential( + nn.Linear(self.input_dim, self.fc_dim), + nn.ReLU(), + ) + + def forward(self, inputs): + return self.layers(inputs) + self.shortcut_layer(inputs) + + +class AudioEncoder(nn.Module): + def __init__(self, channels, padding=3, kernel_size=8, conv_stride=2, conv_pool=None, augmentation=False): + super(AudioEncoder, self).__init__() + self.in_channels = channels[0] + self.augmentation = augmentation + + model = [] + acti = nn.LeakyReLU(0.2) + + nr_layer = len(channels) - 1 + + for i in range(nr_layer): + if conv_pool is None: + model.append(nn.ReflectionPad1d(padding)) + model.append(nn.Conv1d(channels[i], channels[i + 1], kernel_size=kernel_size, stride=conv_stride)) + model.append(acti) + else: + model.append(nn.ReflectionPad1d(padding)) + model.append(nn.Conv1d(channels[i], channels[i + 1], kernel_size=kernel_size, stride=conv_stride)) + model.append(acti) + model.append(conv_pool(kernel_size=2, stride=2)) + + if self.augmentation: + model.append( + nn.Conv1d(channels[-1], channels[-1], kernel_size=kernel_size, stride=conv_stride) + ) + model.append(acti) + + self.model = nn.Sequential(*model) + + def forward(self, x): + + x = x[:, :self.in_channels, :] + x = self.model(x) + return x + + +class AudioDecoder(nn.Module): + def __init__(self, channels, kernel_size=7, ups=25): + super(AudioDecoder, self).__init__() + + model = [] + pad = (kernel_size - 1) // 2 + acti = nn.LeakyReLU(0.2) + + for i in range(len(channels) - 2): + model.append(nn.Upsample(scale_factor=2, mode='nearest')) + model.append(nn.ReflectionPad1d(pad)) + model.append(nn.Conv1d(channels[i], channels[i + 1], + kernel_size=kernel_size, stride=1)) + if i == 0 or i == 1: + model.append(nn.Dropout(p=0.2)) + if not i == len(channels) - 2: + model.append(acti) + + model.append(nn.Upsample(size=ups, mode='nearest')) + model.append(nn.ReflectionPad1d(pad)) + model.append(nn.Conv1d(channels[-2], channels[-1], + kernel_size=kernel_size, stride=1)) + + self.model = nn.Sequential(*model) + + def forward(self, x): + return self.model(x) + + +class Audio2Pose(nn.Module): + def __init__(self, pose_dim, embed_size, augmentation, ups=25): + super(Audio2Pose, self).__init__() + self.pose_dim = pose_dim + self.embed_size = embed_size + self.augmentation = augmentation + + self.aud_enc = AudioEncoder(channels=[13, 64, 128, 256], padding=2, kernel_size=7, conv_stride=1, + conv_pool=nn.AvgPool1d, augmentation=self.augmentation) + if self.augmentation: + self.aud_dec = AudioDecoder(channels=[512, 256, 128, pose_dim]) + else: + self.aud_dec = AudioDecoder(channels=[256, 256, 128, pose_dim], ups=ups) + + if self.augmentation: + self.pose_enc = nn.Sequential( + nn.Linear(self.embed_size // 2, 256), + nn.LayerNorm(256) + ) + + def forward(self, audio_feat, dec_input=None): + + B = audio_feat.shape[0] + + aud_embed = self.aud_enc.forward(audio_feat) + + if 
self.augmentation: + dec_input = dec_input.squeeze(0) + dec_embed = self.pose_enc(dec_input) + dec_embed = dec_embed.unsqueeze(2) + dec_embed = dec_embed.expand(dec_embed.shape[0], dec_embed.shape[1], aud_embed.shape[-1]) + aud_embed = torch.cat([aud_embed, dec_embed], dim=1) + + out = self.aud_dec.forward(aud_embed) + return out + + +if __name__ == '__main__': + import numpy as np + import os + import sys + + test_model = SeqEncoder2D( + C_in=2, + T_in=25, + C_out=512, + num_joints=54, + ) + print(test_model.num_layers) + + input = torch.randn((64, 108, 25)) + output = test_model(input) + print(output.shape) \ No newline at end of file diff --git a/nets/smplx_body_pixel.py b/nets/smplx_body_pixel.py new file mode 100644 index 0000000000000000000000000000000000000000..02bb6c672ecf18371f1ff0f16732c8c16db9f2a8 --- /dev/null +++ b/nets/smplx_body_pixel.py @@ -0,0 +1,326 @@ +import os +import sys + +import torch +from torch.optim.lr_scheduler import StepLR + +sys.path.append(os.getcwd()) + +from nets.layers import * +from nets.base import TrainWrapperBaseClass +from nets.spg.gated_pixelcnn_v2 import GatedPixelCNN as pixelcnn +from nets.spg.vqvae_1d import VQVAE as s2g_body, Wav2VecEncoder +from nets.spg.vqvae_1d import AudioEncoder +from nets.utils import parse_audio, denormalize +from data_utils import get_mfcc, get_melspec, get_mfcc_old, get_mfcc_psf, get_mfcc_psf_min, get_mfcc_ta +import numpy as np +import torch.optim as optim +import torch.nn.functional as F +from sklearn.preprocessing import normalize + +from data_utils.lower_body import c_index, c_index_3d, c_index_6d +from data_utils.utils import smooth_geom, get_mfcc_sepa + + +class TrainWrapper(TrainWrapperBaseClass): + ''' + a wrapper receving a batch from data_utils and calculate loss + ''' + + def __init__(self, args, config): + self.args = args + self.config = config + self.device = torch.device(self.args.gpu) + self.global_step = 0 + + self.convert_to_6d = self.config.Data.pose.convert_to_6d + self.expression = self.config.Data.pose.expression + self.epoch = 0 + self.init_params() + self.num_classes = 4 + self.audio = True + self.composition = self.config.Model.composition + self.bh_model = self.config.Model.bh_model + + if self.audio: + self.audioencoder = AudioEncoder(in_dim=64, num_hiddens=256, num_residual_layers=2, num_residual_hiddens=256).to(self.device) + else: + self.audioencoder = None + if self.convert_to_6d: + dim, layer = 512, 10 + else: + dim, layer = 256, 15 + self.generator = pixelcnn(2048, dim, layer, self.num_classes, self.audio, self.bh_model).to(self.device) + self.g_body = s2g_body(self.each_dim[1], embedding_dim=64, num_embeddings=config.Model.code_num, num_hiddens=1024, + num_residual_layers=2, num_residual_hiddens=512).to(self.device) + self.g_hand = s2g_body(self.each_dim[2], embedding_dim=64, num_embeddings=config.Model.code_num, num_hiddens=1024, + num_residual_layers=2, num_residual_hiddens=512).to(self.device) + + model_path = self.config.Model.vq_path + model_ckpt = torch.load(model_path, map_location=torch.device('cpu')) + self.g_body.load_state_dict(model_ckpt['generator']['g_body']) + self.g_hand.load_state_dict(model_ckpt['generator']['g_hand']) + + if torch.cuda.device_count() > 1: + self.g_body = torch.nn.DataParallel(self.g_body, device_ids=[0, 1]) + self.g_hand = torch.nn.DataParallel(self.g_hand, device_ids=[0, 1]) + self.generator = torch.nn.DataParallel(self.generator, device_ids=[0, 1]) + if self.audioencoder is not None: + self.audioencoder = torch.nn.DataParallel(self.audioencoder, 
device_ids=[0, 1]) + + self.discriminator = None + if self.convert_to_6d: + self.c_index = c_index_6d + else: + self.c_index = c_index_3d + + super().__init__(args, config) + + def init_optimizer(self): + + print('using Adam') + self.generator_optimizer = optim.Adam( + self.generator.parameters(), + lr=self.config.Train.learning_rate.generator_learning_rate, + betas=[0.9, 0.999] + ) + if self.audioencoder is not None: + opt = self.config.Model.AudioOpt + if opt == 'Adam': + self.audioencoder_optimizer = optim.Adam( + self.audioencoder.parameters(), + lr=self.config.Train.learning_rate.generator_learning_rate, + betas=[0.9, 0.999] + ) + else: + print('using SGD') + self.audioencoder_optimizer = optim.SGD( + filter(lambda p: p.requires_grad,self.audioencoder.parameters()), + lr=self.config.Train.learning_rate.generator_learning_rate*10, + momentum=0.9, + nesterov=False, + ) + + def state_dict(self): + model_state = { + 'generator': self.generator.state_dict(), + 'generator_optim': self.generator_optimizer.state_dict(), + 'audioencoder': self.audioencoder.state_dict() if self.audio else None, + 'audioencoder_optim': self.audioencoder_optimizer.state_dict() if self.audio else None, + 'discriminator': self.discriminator.state_dict() if self.discriminator is not None else None, + 'discriminator_optim': self.discriminator_optimizer.state_dict() if self.discriminator is not None else None + } + return model_state + + def load_state_dict(self, state_dict): + + from collections import OrderedDict + new_state_dict = OrderedDict() # create new OrderedDict that does not contain `module.` + for k, v in state_dict.items(): + sub_dict = OrderedDict() + if v is not None: + for k1, v1 in v.items(): + name = k1.replace('module.', '') + sub_dict[name] = v1 + new_state_dict[k] = sub_dict + state_dict = new_state_dict + if 'generator' in state_dict: + self.generator.load_state_dict(state_dict['generator']) + else: + self.generator.load_state_dict(state_dict) + + if 'generator_optim' in state_dict and self.generator_optimizer is not None: + self.generator_optimizer.load_state_dict(state_dict['generator_optim']) + + if self.discriminator is not None: + self.discriminator.load_state_dict(state_dict['discriminator']) + + if 'discriminator_optim' in state_dict and self.discriminator_optimizer is not None: + self.discriminator_optimizer.load_state_dict(state_dict['discriminator_optim']) + + if 'audioencoder' in state_dict and self.audioencoder is not None: + self.audioencoder.load_state_dict(state_dict['audioencoder']) + + def init_params(self): + if self.config.Data.pose.convert_to_6d: + scale = 2 + else: + scale = 1 + + global_orient = round(0 * scale) + leye_pose = reye_pose = round(0 * scale) + jaw_pose = round(0 * scale) + body_pose = round((63 - 24) * scale) + left_hand_pose = right_hand_pose = round(45 * scale) + if self.expression: + expression = 100 + else: + expression = 0 + + b_j = 0 + jaw_dim = jaw_pose + b_e = b_j + jaw_dim + eye_dim = leye_pose + reye_pose + b_b = b_e + eye_dim + body_dim = global_orient + body_pose + b_h = b_b + body_dim + hand_dim = left_hand_pose + right_hand_pose + b_f = b_h + hand_dim + face_dim = expression + + self.dim_list = [b_j, b_e, b_b, b_h, b_f] + self.full_dim = jaw_dim + eye_dim + body_dim + hand_dim + self.pose = int(self.full_dim / round(3 * scale)) + self.each_dim = [jaw_dim, eye_dim + body_dim, hand_dim, face_dim] + + def __call__(self, bat): + # assert (not self.args.infer), "infer mode" + self.global_step += 1 + + total_loss = None + loss_dict = {} + + aud, poses = 
bat['aud_feat'].to(self.device).to(torch.float32), bat['poses'].to(self.device).to(torch.float32) + + id = bat['speaker'].to(self.device) - 20 + # id = F.one_hot(id, self.num_classes) + + poses = poses[:, self.c_index, :] + + aud = aud.permute(0, 2, 1) + gt_poses = poses.permute(0, 2, 1) + + with torch.no_grad(): + self.g_body.eval() + self.g_hand.eval() + if torch.cuda.device_count() > 1: + _, body_latents = self.g_body.module.encode(gt_poses=gt_poses[..., :self.each_dim[1]], id=id) + _, hand_latents = self.g_hand.module.encode(gt_poses=gt_poses[..., self.each_dim[1]:], id=id) + else: + _, body_latents = self.g_body.encode(gt_poses=gt_poses[..., :self.each_dim[1]], id=id) + _, hand_latents = self.g_hand.encode(gt_poses=gt_poses[..., self.each_dim[1]:], id=id) + latents = torch.cat([body_latents.unsqueeze(dim=-1), hand_latents.unsqueeze(dim=-1)], dim=-1) + latents = latents.detach() + + if self.audio: + audio = self.audioencoder(aud[:, :].transpose(1, 2), frame_num=latents.shape[1]*4).unsqueeze(dim=-1).repeat(1, 1, 1, 2) + logits = self.generator(latents[:, :], id, audio) + else: + logits = self.generator(latents, id) + logits = logits.permute(0, 2, 3, 1).contiguous() + + self.generator_optimizer.zero_grad() + if self.audio: + self.audioencoder_optimizer.zero_grad() + + loss = F.cross_entropy(logits.view(-1, logits.shape[-1]), latents.view(-1)) + loss.backward() + + grad = torch.nn.utils.clip_grad_norm(self.generator.parameters(), self.config.Train.max_gradient_norm) + + if torch.isnan(grad).sum() > 0: + print('fuck') + + loss_dict['grad'] = grad.item() + loss_dict['ce_loss'] = loss.item() + self.generator_optimizer.step() + if self.audio: + self.audioencoder_optimizer.step() + + return total_loss, loss_dict + + def infer_on_audio(self, aud_fn, initial_pose=None, norm_stats=None, exp=None, var=None, w_pre=False, rand=None, + continuity=False, id=None, fps=15, sr=22000, B=1, am=None, am_sr=None, frame=0,**kwargs): + ''' + initial_pose: (B, C, T), normalized + (aud_fn, txgfile) -> generated motion (B, T, C) + ''' + output = [] + + assert self.args.infer, "train mode" + self.generator.eval() + self.g_body.eval() + self.g_hand.eval() + + if continuity: + aud_feat, gap = get_mfcc_sepa(aud_fn, sr=sr, fps=fps) + else: + aud_feat = get_mfcc_ta(aud_fn, sr=sr, fps=fps, smlpx=True, type='mfcc', am=am) + aud_feat = aud_feat.transpose(1, 0) + aud_feat = aud_feat[np.newaxis, ...].repeat(B, axis=0) + aud_feat = torch.tensor(aud_feat, dtype=torch.float32).to(self.device) + + if id is None: + id = torch.tensor([0]).to(self.device) + else: + id = id.repeat(B) + + with torch.no_grad(): + aud_feat = aud_feat.permute(0, 2, 1) + if continuity: + self.audioencoder.eval() + pre_pose = {} + pre_pose['b'] = pre_pose['h'] = None + pre_latents, pre_audio, body_0, hand_0 = self.infer(aud_feat[:, :gap], frame, id, B, pre_pose=pre_pose) + pre_pose['b'] = body_0[:, :, -4:].transpose(1,2) + pre_pose['h'] = hand_0[:, :, -4:].transpose(1,2) + _, _, body_1, hand_1 = self.infer(aud_feat[:, gap:], frame, id, B, pre_latents, pre_audio, pre_pose) + body = torch.cat([body_0, body_1], dim=2) + hand = torch.cat([hand_0, hand_1], dim=2) + + else: + if self.audio: + self.audioencoder.eval() + audio = self.audioencoder(aud_feat.transpose(1, 2), frame_num=frame).unsqueeze(dim=-1).repeat(1, 1, 1, 2) + latents = self.generator.generate(id, shape=[audio.shape[2], 2], batch_size=B, aud_feat=audio) + else: + latents = self.generator.generate(id, shape=[aud_feat.shape[1]//4, 2], batch_size=B) + + body_latents = latents[..., 0] + hand_latents 
= latents[..., 1] + + body, _ = self.g_body.decode(b=body_latents.shape[0], w=body_latents.shape[1], latents=body_latents) + hand, _ = self.g_hand.decode(b=hand_latents.shape[0], w=hand_latents.shape[1], latents=hand_latents) + + pred_poses = torch.cat([body, hand], dim=1).transpose(1,2).cpu().numpy() + + output = pred_poses + + return output + + def infer(self, aud_feat, frame, id, B, pre_latents=None, pre_audio=None, pre_pose=None): + audio = self.audioencoder(aud_feat.transpose(1, 2), frame_num=frame).unsqueeze(dim=-1).repeat(1, 1, 1, 2) + latents = self.generator.generate(id, shape=[audio.shape[2], 2], batch_size=B, aud_feat=audio, + pre_latents=pre_latents, pre_audio=pre_audio) + + body_latents = latents[..., 0] + hand_latents = latents[..., 1] + + body, _ = self.g_body.decode(b=body_latents.shape[0], w=body_latents.shape[1], + latents=body_latents, pre_state=pre_pose['b']) + hand, _ = self.g_hand.decode(b=hand_latents.shape[0], w=hand_latents.shape[1], + latents=hand_latents, pre_state=pre_pose['h']) + + return latents, audio, body, hand + + def generate(self, aud, id, frame_num=0): + + self.generator.eval() + self.g_body.eval() + self.g_hand.eval() + aud_feat = aud.permute(0, 2, 1) + if self.audio: + self.audioencoder.eval() + audio = self.audioencoder(aud_feat.transpose(1, 2), frame_num=frame_num).unsqueeze(dim=-1).repeat(1, 1, 1, 2) + latents = self.generator.generate(id, shape=[audio.shape[2], 2], batch_size=aud.shape[0], aud_feat=audio) + else: + latents = self.generator.generate(id, shape=[aud_feat.shape[1] // 4, 2], batch_size=aud.shape[0]) + + body_latents = latents[..., 0] + hand_latents = latents[..., 1] + + body = self.g_body.decode(b=body_latents.shape[0], w=body_latents.shape[1], latents=body_latents) + hand = self.g_hand.decode(b=hand_latents.shape[0], w=hand_latents.shape[1], latents=hand_latents) + + pred_poses = torch.cat([body, hand], dim=1).transpose(1, 2) + return pred_poses diff --git a/nets/smplx_body_vq.py b/nets/smplx_body_vq.py new file mode 100644 index 0000000000000000000000000000000000000000..95770ac251b873de26b4530f0a37fe43ed5e14f5 --- /dev/null +++ b/nets/smplx_body_vq.py @@ -0,0 +1,302 @@ +import os +import sys + +from torch.optim.lr_scheduler import StepLR + +sys.path.append(os.getcwd()) + +from nets.layers import * +from nets.base import TrainWrapperBaseClass +from nets.spg.s2glayers import Generator as G_S2G, Discriminator as D_S2G +from nets.spg.vqvae_1d import VQVAE as s2g_body +from nets.utils import parse_audio, denormalize +from data_utils import get_mfcc, get_melspec, get_mfcc_old, get_mfcc_psf, get_mfcc_psf_min, get_mfcc_ta +import numpy as np +import torch.optim as optim +import torch.nn.functional as F +from sklearn.preprocessing import normalize + +from data_utils.lower_body import c_index, c_index_3d, c_index_6d + + +class TrainWrapper(TrainWrapperBaseClass): + ''' + a wrapper receving a batch from data_utils and calculate loss + ''' + + def __init__(self, args, config): + self.args = args + self.config = config + self.device = torch.device(self.args.gpu) + self.global_step = 0 + + self.convert_to_6d = self.config.Data.pose.convert_to_6d + self.expression = self.config.Data.pose.expression + self.epoch = 0 + self.init_params() + self.num_classes = 4 + self.composition = self.config.Model.composition + if self.composition: + self.g_body = s2g_body(self.each_dim[1], embedding_dim=64, num_embeddings=config.Model.code_num, num_hiddens=1024, + num_residual_layers=2, num_residual_hiddens=512).to(self.device) + self.g_hand = 
s2g_body(self.each_dim[2], embedding_dim=64, num_embeddings=config.Model.code_num, num_hiddens=1024, + num_residual_layers=2, num_residual_hiddens=512).to(self.device) + else: + self.g = s2g_body(self.each_dim[1] + self.each_dim[2], embedding_dim=64, num_embeddings=config.Model.code_num, + num_hiddens=1024, num_residual_layers=2, num_residual_hiddens=512).to(self.device) + + self.discriminator = None + + if self.convert_to_6d: + self.c_index = c_index_6d + else: + self.c_index = c_index_3d + + super().__init__(args, config) + + def init_optimizer(self): + print('using Adam') + if self.composition: + self.g_body_optimizer = optim.Adam( + self.g_body.parameters(), + lr=self.config.Train.learning_rate.generator_learning_rate, + betas=[0.9, 0.999] + ) + self.g_hand_optimizer = optim.Adam( + self.g_hand.parameters(), + lr=self.config.Train.learning_rate.generator_learning_rate, + betas=[0.9, 0.999] + ) + else: + self.g_optimizer = optim.Adam( + self.g.parameters(), + lr=self.config.Train.learning_rate.generator_learning_rate, + betas=[0.9, 0.999] + ) + + def state_dict(self): + if self.composition: + model_state = { + 'g_body': self.g_body.state_dict(), + 'g_body_optim': self.g_body_optimizer.state_dict(), + 'g_hand': self.g_hand.state_dict(), + 'g_hand_optim': self.g_hand_optimizer.state_dict(), + 'discriminator': self.discriminator.state_dict() if self.discriminator is not None else None, + 'discriminator_optim': self.discriminator_optimizer.state_dict() if self.discriminator is not None else None + } + else: + model_state = { + 'g': self.g.state_dict(), + 'g_optim': self.g_optimizer.state_dict(), + 'discriminator': self.discriminator.state_dict() if self.discriminator is not None else None, + 'discriminator_optim': self.discriminator_optimizer.state_dict() if self.discriminator is not None else None + } + return model_state + + def init_params(self): + if self.config.Data.pose.convert_to_6d: + scale = 2 + else: + scale = 1 + + global_orient = round(0 * scale) + leye_pose = reye_pose = round(0 * scale) + jaw_pose = round(0 * scale) + body_pose = round((63 - 24) * scale) + left_hand_pose = right_hand_pose = round(45 * scale) + if self.expression: + expression = 100 + else: + expression = 0 + + b_j = 0 + jaw_dim = jaw_pose + b_e = b_j + jaw_dim + eye_dim = leye_pose + reye_pose + b_b = b_e + eye_dim + body_dim = global_orient + body_pose + b_h = b_b + body_dim + hand_dim = left_hand_pose + right_hand_pose + b_f = b_h + hand_dim + face_dim = expression + + self.dim_list = [b_j, b_e, b_b, b_h, b_f] + self.full_dim = jaw_dim + eye_dim + body_dim + hand_dim + self.pose = int(self.full_dim / round(3 * scale)) + self.each_dim = [jaw_dim, eye_dim + body_dim, hand_dim, face_dim] + + def __call__(self, bat): + # assert (not self.args.infer), "infer mode" + self.global_step += 1 + + total_loss = None + loss_dict = {} + + aud, poses = bat['aud_feat'].to(self.device).to(torch.float32), bat['poses'].to(self.device).to(torch.float32) + + # id = bat['speaker'].to(self.device) - 20 + # id = F.one_hot(id, self.num_classes) + + poses = poses[:, self.c_index, :] + gt_poses = poses.permute(0, 2, 1) + b_poses = gt_poses[..., :self.each_dim[1]] + h_poses = gt_poses[..., self.each_dim[1]:] + + if self.composition: + loss = 0 + loss_dict, loss = self.vq_train(b_poses[:, :], 'b', self.g_body, loss_dict, loss) + loss_dict, loss = self.vq_train(h_poses[:, :], 'h', self.g_hand, loss_dict, loss) + else: + loss = 0 + loss_dict, loss = self.vq_train(gt_poses[:, :], 'g', self.g, loss_dict, loss) + + return total_loss, 
loss_dict + + def vq_train(self, gt, name, model, dict, total_loss, pre=None): + e_q_loss, x_recon = model(gt_poses=gt, pre_state=pre) + loss, loss_dict = self.get_loss(pred_poses=x_recon, gt_poses=gt, e_q_loss=e_q_loss, pre=pre) + # total_loss = total_loss + loss + + if name == 'b': + optimizer_name = 'g_body_optimizer' + elif name == 'h': + optimizer_name = 'g_hand_optimizer' + elif name == 'g': + optimizer_name = 'g_optimizer' + else: + raise ValueError("model's name must be b or h") + optimizer = getattr(self, optimizer_name) + optimizer.zero_grad() + loss.backward() + optimizer.step() + + for key in list(loss_dict.keys()): + dict[name + key] = loss_dict.get(key, 0).item() + return dict, total_loss + + def get_loss(self, + pred_poses, + gt_poses, + e_q_loss, + pre=None + ): + loss_dict = {} + + + rec_loss = torch.mean(torch.abs(pred_poses - gt_poses)) + v_pr = pred_poses[:, 1:] - pred_poses[:, :-1] + v_gt = gt_poses[:, 1:] - gt_poses[:, :-1] + velocity_loss = torch.mean(torch.abs(v_pr - v_gt)) + + if pre is None: + f0_vel = 0 + else: + v0_pr = pred_poses[:, 0] - pre[:, -1] + v0_gt = gt_poses[:, 0] - pre[:, -1] + f0_vel = torch.mean(torch.abs(v0_pr - v0_gt)) + + gen_loss = rec_loss + e_q_loss + velocity_loss + f0_vel + + loss_dict['rec_loss'] = rec_loss + loss_dict['velocity_loss'] = velocity_loss + # loss_dict['e_q_loss'] = e_q_loss + if pre is not None: + loss_dict['f0_vel'] = f0_vel + + return gen_loss, loss_dict + + def infer_on_audio(self, aud_fn, initial_pose=None, norm_stats=None, exp=None, var=None, w_pre=False, continuity=False, + id=None, fps=15, sr=22000, smooth=False, **kwargs): + ''' + initial_pose: (B, C, T), normalized + (aud_fn, txgfile) -> generated motion (B, T, C) + ''' + output = [] + + assert self.args.infer, "train mode" + if self.composition: + self.g_body.eval() + self.g_hand.eval() + else: + self.g.eval() + + if self.config.Data.pose.normalization: + assert norm_stats is not None + data_mean = norm_stats[0] + data_std = norm_stats[1] + + # assert initial_pose.shape[-1] == pre_length + if initial_pose is not None: + gt = initial_pose[:, :, :].to(self.device).to(torch.float32) + pre_poses = initial_pose[:, :, :15].permute(0, 2, 1).to(self.device).to(torch.float32) + poses = initial_pose.permute(0, 2, 1).to(self.device).to(torch.float32) + B = pre_poses.shape[0] + else: + gt = None + pre_poses = None + B = 1 + + if type(aud_fn) == torch.Tensor: + aud_feat = torch.tensor(aud_fn, dtype=torch.float32).to(self.device) + num_poses_to_generate = aud_feat.shape[-1] + else: + aud_feat = get_mfcc_ta(aud_fn, sr=sr, fps=fps, smlpx=True, type='mfcc').transpose(1, 0) + aud_feat = aud_feat[:, :] + num_poses_to_generate = aud_feat.shape[-1] + aud_feat = aud_feat[np.newaxis, ...].repeat(B, axis=0) + aud_feat = torch.tensor(aud_feat, dtype=torch.float32).to(self.device) + + # pre_poses = torch.randn(pre_poses.shape).to(self.device).to(torch.float32) + if id is None: + id = F.one_hot(torch.tensor([[0]]), self.num_classes).to(self.device) + + with torch.no_grad(): + aud_feat = aud_feat.permute(0, 2, 1) + gt_poses = gt[:, self.c_index].permute(0, 2, 1) + if self.composition: + if continuity: + pred_poses_body = [] + pred_poses_hand = [] + pre_b = None + pre_h = None + for i in range(5): + _, pred_body = self.g_body(gt_poses=gt_poses[:, i*60:(i+1)*60, :self.each_dim[1]], pre_state=pre_b) + pre_b = pred_body[..., -1:].transpose(1,2) + pred_poses_body.append(pred_body) + _, pred_hand = self.g_hand(gt_poses=gt_poses[:, i*60:(i+1)*60, self.each_dim[1]:], pre_state=pre_h) + pre_h = 
pred_hand[..., -1:].transpose(1,2) + pred_poses_hand.append(pred_hand) + + pred_poses_body = torch.cat(pred_poses_body, dim=2) + pred_poses_hand = torch.cat(pred_poses_hand, dim=2) + else: + _, pred_poses_body = self.g_body(gt_poses=gt_poses[..., :self.each_dim[1]], id=id) + _, pred_poses_hand = self.g_hand(gt_poses=gt_poses[..., self.each_dim[1]:], id=id) + pred_poses = torch.cat([pred_poses_body, pred_poses_hand], dim=1) + else: + _, pred_poses = self.g(gt_poses=gt_poses, id=id) + pred_poses = pred_poses.transpose(1, 2).cpu().numpy() + output = pred_poses + + if self.config.Data.pose.normalization: + output = denormalize(output, data_mean, data_std) + + if smooth: + lamda = 0.8 + smooth_f = 10 + frame = 149 + for i in range(smooth_f): + f = frame + i + l = lamda * (i + 1) / smooth_f + output[0, f] = (1 - l) * output[0, f - 1] + l * output[0, f] + + output = np.concatenate(output, axis=1) + + return output + + def load_state_dict(self, state_dict): + if self.composition: + self.g_body.load_state_dict(state_dict['g_body']) + self.g_hand.load_state_dict(state_dict['g_hand']) + else: + self.g.load_state_dict(state_dict['g']) diff --git a/nets/smplx_face.py b/nets/smplx_face.py new file mode 100644 index 0000000000000000000000000000000000000000..e591b9dab674770b60655f607892b068f412d75a --- /dev/null +++ b/nets/smplx_face.py @@ -0,0 +1,238 @@ +import os +import sys + +sys.path.append(os.getcwd()) + +from nets.layers import * +from nets.base import TrainWrapperBaseClass +# from nets.spg.faceformer import Faceformer +from nets.spg.s2g_face import Generator as s2g_face +from losses import KeypointLoss +from nets.utils import denormalize +from data_utils import get_mfcc_psf, get_mfcc_psf_min, get_mfcc_ta +import numpy as np +import torch.optim as optim +import torch.nn.functional as F +from sklearn.preprocessing import normalize +import smplx + + +class TrainWrapper(TrainWrapperBaseClass): + ''' + a wrapper receving a batch from data_utils and calculate loss + ''' + + def __init__(self, args, config): + self.args = args + self.config = config + self.device = torch.device(self.args.gpu) + self.global_step = 0 + + self.convert_to_6d = self.config.Data.pose.convert_to_6d + self.expression = self.config.Data.pose.expression + self.epoch = 0 + self.init_params() + self.num_classes = 4 + + self.generator = s2g_face( + n_poses=self.config.Data.pose.generate_length, + each_dim=self.each_dim, + dim_list=self.dim_list, + training=not self.args.infer, + device=self.device, + identity=False if self.convert_to_6d else True, + num_classes=self.num_classes, + ).to(self.device) + + # self.generator = Faceformer().to(self.device) + + self.discriminator = None + self.am = None + + self.MSELoss = KeypointLoss().to(self.device) + super().__init__(args, config) + + def init_optimizer(self): + self.generator_optimizer = optim.SGD( + filter(lambda p: p.requires_grad,self.generator.parameters()), + lr=0.001, + momentum=0.9, + nesterov=False, + ) + + def init_params(self): + if self.convert_to_6d: + scale = 2 + else: + scale = 1 + + global_orient = round(3 * scale) + leye_pose = reye_pose = round(3 * scale) + jaw_pose = round(3 * scale) + body_pose = round(63 * scale) + left_hand_pose = right_hand_pose = round(45 * scale) + if self.expression: + expression = 100 + else: + expression = 0 + + b_j = 0 + jaw_dim = jaw_pose + b_e = b_j + jaw_dim + eye_dim = leye_pose + reye_pose + b_b = b_e + eye_dim + body_dim = global_orient + body_pose + b_h = b_b + body_dim + hand_dim = left_hand_pose + right_hand_pose + b_f = b_h + 
hand_dim + face_dim = expression + + self.dim_list = [b_j, b_e, b_b, b_h, b_f] + self.full_dim = jaw_dim + eye_dim + body_dim + hand_dim + face_dim + self.pose = int(self.full_dim / round(3 * scale)) + self.each_dim = [jaw_dim, eye_dim + body_dim, hand_dim, face_dim] + + def __call__(self, bat): + # assert (not self.args.infer), "infer mode" + self.global_step += 1 + + total_loss = None + loss_dict = {} + + aud, poses = bat['aud_feat'].to(self.device).to(torch.float32), bat['poses'].to(self.device).to(torch.float32) + id = bat['speaker'].to(self.device) - 20 + id = F.one_hot(id, self.num_classes) + + aud = aud.permute(0, 2, 1) + gt_poses = poses.permute(0, 2, 1) + + if self.expression: + expression = bat['expression'].to(self.device).to(torch.float32) + gt_poses = torch.cat([gt_poses, expression.permute(0, 2, 1)], dim=2) + + pred_poses, _ = self.generator( + aud, + gt_poses, + id, + ) + + G_loss, G_loss_dict = self.get_loss( + pred_poses=pred_poses, + gt_poses=gt_poses, + pre_poses=None, + mode='training_G', + gt_conf=None, + aud=aud, + ) + + self.generator_optimizer.zero_grad() + G_loss.backward() + grad = torch.nn.utils.clip_grad_norm(self.generator.parameters(), self.config.Train.max_gradient_norm) + loss_dict['grad'] = grad.item() + self.generator_optimizer.step() + + for key in list(G_loss_dict.keys()): + loss_dict[key] = G_loss_dict.get(key, 0).item() + + return total_loss, loss_dict + + def get_loss(self, + pred_poses, + gt_poses, + pre_poses, + aud, + mode='training_G', + gt_conf=None, + exp=1, + gt_nzero=None, + pre_nzero=None, + ): + loss_dict = {} + + + [b_j, b_e, b_b, b_h, b_f] = self.dim_list + + MSELoss = torch.mean(torch.abs(pred_poses[:, :, :6] - gt_poses[:, :, :6])) + if self.expression: + expl = torch.mean((pred_poses[:, :, -100:] - gt_poses[:, :, -100:])**2) + else: + expl = 0 + + gen_loss = expl + MSELoss + + loss_dict['MSELoss'] = MSELoss + if self.expression: + loss_dict['exp_loss'] = expl + + return gen_loss, loss_dict + + def infer_on_audio(self, aud_fn, id=None, initial_pose=None, norm_stats=None, w_pre=False, frame=None, am=None, am_sr=16000, **kwargs): + ''' + initial_pose: (B, C, T), normalized + (aud_fn, txgfile) -> generated motion (B, T, C) + ''' + output = [] + + # assert self.args.infer, "train mode" + self.generator.eval() + + if self.config.Data.pose.normalization: + assert norm_stats is not None + data_mean = norm_stats[0] + data_std = norm_stats[1] + + # assert initial_pose.shape[-1] == pre_length + if initial_pose is not None: + gt = initial_pose[:,:,:].permute(0, 2, 1).to(self.generator.device).to(torch.float32) + pre_poses = initial_pose[:,:,:15].permute(0, 2, 1).to(self.generator.device).to(torch.float32) + poses = initial_pose.permute(0, 2, 1).to(self.generator.device).to(torch.float32) + B = pre_poses.shape[0] + else: + gt = None + pre_poses=None + B = 1 + + if type(aud_fn) == torch.Tensor: + aud_feat = torch.tensor(aud_fn, dtype=torch.float32).to(self.generator.device) + num_poses_to_generate = aud_feat.shape[-1] + else: + aud_feat = get_mfcc_ta(aud_fn, am=am, am_sr=am_sr, fps=30, encoder_choice='faceformer') + aud_feat = aud_feat[np.newaxis, ...].repeat(B, axis=0) + aud_feat = torch.tensor(aud_feat, dtype=torch.float32).to(self.generator.device).transpose(1, 2) + if frame is None: + frame = aud_feat.shape[2]*30//16000 + # + if id is None: + id = torch.tensor([[0, 0, 0, 0]], dtype=torch.float32, device=self.generator.device) + else: + id = F.one_hot(id, self.num_classes).to(self.generator.device) + + with torch.no_grad(): + pred_poses = 
self.generator(aud_feat, pre_poses, id, time_steps=frame)[0] + pred_poses = pred_poses.cpu().numpy() + output = pred_poses + + if self.config.Data.pose.normalization: + output = denormalize(output, data_mean, data_std) + + return output + + + def generate(self, wv2_feat, frame): + ''' + initial_pose: (B, C, T), normalized + (aud_fn, txgfile) -> generated motion (B, T, C) + ''' + output = [] + + # assert self.args.infer, "train mode" + self.generator.eval() + + B = 1 + + id = torch.tensor([[0, 0, 0, 0]], dtype=torch.float32, device=self.generator.device) + id = id.repeat(wv2_feat.shape[0], 1) + + with torch.no_grad(): + pred_poses = self.generator(wv2_feat, None, id, time_steps=frame)[0] + return pred_poses diff --git a/nets/spg/__pycache__/gated_pixelcnn_v2.cpython-37.pyc b/nets/spg/__pycache__/gated_pixelcnn_v2.cpython-37.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ece9f92817f4608315a2b67b59b7459c8110bec3 Binary files /dev/null and b/nets/spg/__pycache__/gated_pixelcnn_v2.cpython-37.pyc differ diff --git a/nets/spg/__pycache__/s2g_face.cpython-37.pyc b/nets/spg/__pycache__/s2g_face.cpython-37.pyc new file mode 100644 index 0000000000000000000000000000000000000000..599d1a588db0bda078a96c19ed4b4d5bac83bc1e Binary files /dev/null and b/nets/spg/__pycache__/s2g_face.cpython-37.pyc differ diff --git a/nets/spg/__pycache__/s2glayers.cpython-37.pyc b/nets/spg/__pycache__/s2glayers.cpython-37.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5f4e388c2dceab39ec09ae40c74684ad1abaf0fd Binary files /dev/null and b/nets/spg/__pycache__/s2glayers.cpython-37.pyc differ diff --git a/nets/spg/__pycache__/vqvae_1d.cpython-37.pyc b/nets/spg/__pycache__/vqvae_1d.cpython-37.pyc new file mode 100644 index 0000000000000000000000000000000000000000..25d4b3ed10c2129681585f8739a7f1d0577959d9 Binary files /dev/null and b/nets/spg/__pycache__/vqvae_1d.cpython-37.pyc differ diff --git a/nets/spg/__pycache__/vqvae_modules.cpython-37.pyc b/nets/spg/__pycache__/vqvae_modules.cpython-37.pyc new file mode 100644 index 0000000000000000000000000000000000000000..32cbac6ed2e7578e249bd3b5b6685d8fcb912c3d Binary files /dev/null and b/nets/spg/__pycache__/vqvae_modules.cpython-37.pyc differ diff --git a/nets/spg/__pycache__/wav2vec.cpython-37.pyc b/nets/spg/__pycache__/wav2vec.cpython-37.pyc new file mode 100644 index 0000000000000000000000000000000000000000..caa869888be94fb2015514cc9badee717b41ca6a Binary files /dev/null and b/nets/spg/__pycache__/wav2vec.cpython-37.pyc differ diff --git a/nets/spg/gated_pixelcnn_v2.py b/nets/spg/gated_pixelcnn_v2.py new file mode 100644 index 0000000000000000000000000000000000000000..934ae6e770ae5c1963b323faeb414e60a87c2479 --- /dev/null +++ b/nets/spg/gated_pixelcnn_v2.py @@ -0,0 +1,177 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F + + +def weights_init(m): + classname = m.__class__.__name__ + if classname.find('Conv') != -1: + try: + nn.init.xavier_uniform_(m.weight.data) + m.bias.data.fill_(0) + except AttributeError: + print("Skipping initialization of ", classname) + + +class GatedActivation(nn.Module): + def __init__(self): + super().__init__() + + def forward(self, x): + x, y = x.chunk(2, dim=1) + return F.tanh(x) * F.sigmoid(y) + + +class GatedMaskedConv2d(nn.Module): + def __init__(self, mask_type, dim, kernel, residual=True, n_classes=10, bh_model=False): + super().__init__() + assert kernel % 2 == 1, print("Kernel size must be odd") + self.mask_type = mask_type + self.residual = residual + self.bh_model = 
bh_model + + self.class_cond_embedding = nn.Embedding( + n_classes, 2 * dim + ) + + kernel_shp = (kernel // 2 + 1, 3 if self.bh_model else 1) # (ceil(n/2), n) + padding_shp = (kernel // 2, 1 if self.bh_model else 0) + self.vert_stack = nn.Conv2d( + dim, dim * 2, + kernel_shp, 1, padding_shp + ) + + self.vert_to_horiz = nn.Conv2d(2 * dim, 2 * dim, 1) + + kernel_shp = (1, 2) + padding_shp = (0, 1) + self.horiz_stack = nn.Conv2d( + dim, dim * 2, + kernel_shp, 1, padding_shp + ) + + self.horiz_resid = nn.Conv2d(dim, dim, 1) + + self.gate = GatedActivation() + + def make_causal(self): + self.vert_stack.weight.data[:, :, -1].zero_() # Mask final row + self.horiz_stack.weight.data[:, :, :, -1].zero_() # Mask final column + + def forward(self, x_v, x_h, h): + if self.mask_type == 'A': + self.make_causal() + + h = self.class_cond_embedding(h) + h_vert = self.vert_stack(x_v) + h_vert = h_vert[:, :, :x_v.size(-2), :] + out_v = self.gate(h_vert + h[:, :, None, None]) + + if self.bh_model: + h_horiz = self.horiz_stack(x_h) + h_horiz = h_horiz[:, :, :, :x_h.size(-1)] + v2h = self.vert_to_horiz(h_vert) + + out = self.gate(v2h + h_horiz + h[:, :, None, None]) + if self.residual: + out_h = self.horiz_resid(out) + x_h + else: + out_h = self.horiz_resid(out) + else: + if self.residual: + out_v = self.horiz_resid(out_v) + x_v + else: + out_v = self.horiz_resid(out_v) + out_h = out_v + + return out_v, out_h + + +class GatedPixelCNN(nn.Module): + def __init__(self, input_dim=256, dim=64, n_layers=15, n_classes=10, audio=False, bh_model=False): + super().__init__() + self.dim = dim + self.audio = audio + self.bh_model = bh_model + + if self.audio: + self.embedding_aud = nn.Conv2d(256, dim, 1, 1, padding=0) + self.fusion_v = nn.Conv2d(dim * 2, dim, 1, 1, padding=0) + self.fusion_h = nn.Conv2d(dim * 2, dim, 1, 1, padding=0) + + # Create embedding layer to embed input + self.embedding = nn.Embedding(input_dim, dim) + + # Building the PixelCNN layer by layer + self.layers = nn.ModuleList() + + # Initial block with Mask-A convolution + # Rest with Mask-B convolutions + for i in range(n_layers): + mask_type = 'A' if i == 0 else 'B' + kernel = 7 if i == 0 else 3 + residual = False if i == 0 else True + + self.layers.append( + GatedMaskedConv2d(mask_type, dim, kernel, residual, n_classes, bh_model) + ) + + # Add the output layer + self.output_conv = nn.Sequential( + nn.Conv2d(dim, 512, 1), + nn.ReLU(True), + nn.Conv2d(512, input_dim, 1) + ) + + self.apply(weights_init) + + self.dp = nn.Dropout(0.1) + + def forward(self, x, label, aud=None): + shp = x.size() + (-1,) + x = self.embedding(x.view(-1)).view(shp) # (B, H, W, C) + x = x.permute(0, 3, 1, 2) # (B, C, W, W) + + x_v, x_h = (x, x) + for i, layer in enumerate(self.layers): + if i == 1 and self.audio is True: + aud = self.embedding_aud(aud) + a = torch.ones(aud.shape[-2]).to(aud.device) + a = self.dp(a) + aud = (aud.transpose(-1, -2) * a).transpose(-1, -2) + x_v = self.fusion_v(torch.cat([x_v, aud], dim=1)) + if self.bh_model: + x_h = self.fusion_h(torch.cat([x_h, aud], dim=1)) + x_v, x_h = layer(x_v, x_h, label) + + if self.bh_model: + return self.output_conv(x_h) + else: + return self.output_conv(x_v) + + def generate(self, label, shape=(8, 8), batch_size=64, aud_feat=None, pre_latents=None, pre_audio=None): + param = next(self.parameters()) + x = torch.zeros( + (batch_size, *shape), + dtype=torch.int64, device=param.device + ) + if pre_latents is not None: + x = torch.cat([pre_latents, x], dim=1) + aud_feat = torch.cat([pre_audio, aud_feat], dim=2) + h0 = 
pre_latents.shape[1] + h = h0 + shape[0] + else: + h0 = 0 + h = shape[0] + + for i in range(h0, h): + for j in range(shape[1]): + if self.audio: + logits = self.forward(x, label, aud_feat) + else: + logits = self.forward(x, label) + probs = F.softmax(logits[:, :, i, j], -1) + x.data[:, i, j].copy_( + probs.multinomial(1).squeeze().data + ) + return x[:, h0:h] diff --git a/nets/spg/s2g_face.py b/nets/spg/s2g_face.py new file mode 100644 index 0000000000000000000000000000000000000000..b221df6c7bff0912640dfffe46267f8a131cc829 --- /dev/null +++ b/nets/spg/s2g_face.py @@ -0,0 +1,226 @@ +''' +not exactly the same as the official repo but the results are good +''' +import sys +import os + +from transformers import Wav2Vec2Processor + +from .wav2vec import Wav2Vec2Model +from torchaudio.sox_effects import apply_effects_tensor + +sys.path.append(os.getcwd()) + +import numpy as np +import torch +import torch.nn as nn +import torch.nn.functional as F +import torchaudio as ta +import math +from nets.layers import SeqEncoder1D, SeqTranslator1D, ConvNormRelu + + +""" from https://github.com/ai4r/Gesture-Generation-from-Trimodal-Context.git """ + + +def audio_chunking(audio: torch.Tensor, frame_rate: int = 30, chunk_size: int = 16000): + """ + :param audio: 1 x T tensor containing a 16kHz audio signal + :param frame_rate: frame rate for video (we need one audio chunk per video frame) + :param chunk_size: number of audio samples per chunk + :return: num_chunks x chunk_size tensor containing sliced audio + """ + samples_per_frame = 16000 // frame_rate + padding = (chunk_size - samples_per_frame) // 2 + audio = torch.nn.functional.pad(audio.unsqueeze(0), pad=[padding, padding]).squeeze(0) + anchor_points = list(range(chunk_size//2, audio.shape[-1]-chunk_size//2, samples_per_frame)) + audio = torch.cat([audio[:, i-chunk_size//2:i+chunk_size//2] for i in anchor_points], dim=0) + return audio + + +class MeshtalkEncoder(nn.Module): + def __init__(self, latent_dim: int = 128, model_name: str = 'audio_encoder'): + """ + :param latent_dim: size of the latent audio embedding + :param model_name: name of the model, used to load and save the model + """ + super().__init__() + + self.melspec = ta.transforms.MelSpectrogram( + sample_rate=16000, n_fft=2048, win_length=800, hop_length=160, n_mels=80 + ) + + conv_len = 5 + self.convert_dimensions = torch.nn.Conv1d(80, 128, kernel_size=conv_len) + self.weights_init(self.convert_dimensions) + self.receptive_field = conv_len + + convs = [] + for i in range(6): + dilation = 2 * (i % 3 + 1) + self.receptive_field += (conv_len - 1) * dilation + convs += [torch.nn.Conv1d(128, 128, kernel_size=conv_len, dilation=dilation)] + self.weights_init(convs[-1]) + self.convs = torch.nn.ModuleList(convs) + self.code = torch.nn.Linear(128, latent_dim) + + self.apply(lambda x: self.weights_init(x)) + + def weights_init(self, m): + if isinstance(m, torch.nn.Conv1d): + torch.nn.init.xavier_uniform_(m.weight) + try: + torch.nn.init.constant_(m.bias, .01) + except: + pass + + def forward(self, audio: torch.Tensor): + """ + :param audio: B x T x 16000 Tensor containing 1 sec of audio centered around the current time frame + :return: code: B x T x latent_dim Tensor containing a latent audio code/embedding + """ + B, T = audio.shape[0], audio.shape[1] + x = self.melspec(audio).squeeze(1) + x = torch.log(x.clamp(min=1e-10, max=None)) + if T == 1: + x = x.unsqueeze(1) + + # Convert to the right dimensionality + x = x.view(-1, x.shape[2], x.shape[3]) + x = F.leaky_relu(self.convert_dimensions(x), .2) 
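+ # Note on the stack below: each dilated conv (kernel 5, dilations cycling 2, 4, 6) runs
+ # without padding, so it shortens the time axis by (conv_len - 1) * dilation samples; the
+ # skip connection center-crops x by the same amount on each side and averages the two paths.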
+ + # Process stacks + for conv in self.convs: + x_ = F.leaky_relu(conv(x), .2) + if self.training: + x_ = F.dropout(x_, .2) + l = (x.shape[2] - x_.shape[2]) // 2 + x = (x[:, :, l:-l] + x_) / 2 + + x = torch.mean(x, dim=-1) + x = x.view(B, T, x.shape[-1]) + x = self.code(x) + + return {"code": x} + + +class AudioEncoder(nn.Module): + def __init__(self, in_dim, out_dim, identity=False, num_classes=0): + super().__init__() + self.identity = identity + if self.identity: + in_dim = in_dim + 64 + self.id_mlp = nn.Conv1d(num_classes, 64, 1, 1) + self.first_net = SeqTranslator1D(in_dim, out_dim, + min_layers_num=3, + residual=True, + norm='ln' + ) + self.grus = nn.GRU(out_dim, out_dim, 1, batch_first=True) + self.dropout = nn.Dropout(0.1) + # self.att = nn.MultiheadAttention(out_dim, 4, dropout=0.1, batch_first=True) + + def forward(self, spectrogram, pre_state=None, id=None, time_steps=None): + + spectrogram = spectrogram + spectrogram = self.dropout(spectrogram) + if self.identity: + id = id.reshape(id.shape[0], -1, 1).repeat(1, 1, spectrogram.shape[2]).to(torch.float32) + id = self.id_mlp(id) + spectrogram = torch.cat([spectrogram, id], dim=1) + x1 = self.first_net(spectrogram)# .permute(0, 2, 1) + if time_steps is not None: + x1 = F.interpolate(x1, size=time_steps, align_corners=False, mode='linear') + # x1, _ = self.att(x1, x1, x1) + # x1, hidden_state = self.grus(x1) + # x1 = x1.permute(0, 2, 1) + hidden_state=None + + return x1, hidden_state + + +class Generator(nn.Module): + def __init__(self, + n_poses, + each_dim: list, + dim_list: list, + training=False, + device=None, + identity=True, + num_classes=0, + ): + super().__init__() + + self.training = training + self.device = device + self.gen_length = n_poses + self.identity = identity + + norm = 'ln' + in_dim = 256 + out_dim = 256 + + self.encoder_choice = 'faceformer' + + if self.encoder_choice == 'meshtalk': + self.audio_encoder = MeshtalkEncoder(latent_dim=in_dim) + elif self.encoder_choice == 'faceformer': + # wav2vec 2.0 weights initialization + self.audio_encoder = Wav2Vec2Model.from_pretrained("facebook/wav2vec2-base-960h") # "vitouphy/wav2vec2-xls-r-300m-phoneme""facebook/wav2vec2-base-960h" + self.audio_encoder.feature_extractor._freeze_parameters() + self.audio_feature_map = nn.Linear(768, in_dim) + else: + self.audio_encoder = AudioEncoder(in_dim=64, out_dim=out_dim) + + self.audio_middle = AudioEncoder(in_dim, out_dim, identity, num_classes) + + self.dim_list = dim_list + + self.decoder = nn.ModuleList() + self.final_out = nn.ModuleList() + + self.decoder.append(nn.Sequential( + ConvNormRelu(out_dim, 64, norm=norm), + ConvNormRelu(64, 64, norm=norm), + ConvNormRelu(64, 64, norm=norm), + )) + self.final_out.append(nn.Conv1d(64, each_dim[0], 1, 1)) + + self.decoder.append(nn.Sequential( + ConvNormRelu(out_dim, out_dim, norm=norm), + ConvNormRelu(out_dim, out_dim, norm=norm), + ConvNormRelu(out_dim, out_dim, norm=norm), + )) + self.final_out.append(nn.Conv1d(out_dim, each_dim[3], 1, 1)) + + def forward(self, in_spec, gt_poses=None, id=None, pre_state=None, time_steps=None): + if self.training: + time_steps = gt_poses.shape[1] + + # vector, hidden_state = self.audio_encoder(in_spec, pre_state, time_steps=time_steps) + if self.encoder_choice == 'meshtalk': + in_spec = audio_chunking(in_spec.squeeze(-1), frame_rate=30, chunk_size=16000) + feature = self.audio_encoder(in_spec.unsqueeze(0))["code"].transpose(1, 2) + elif self.encoder_choice == 'faceformer': + hidden_states = self.audio_encoder(in_spec.reshape(in_spec.shape[0], -1), 
frame_num=time_steps).last_hidden_state + feature = self.audio_feature_map(hidden_states).transpose(1, 2) + else: + feature, hidden_state = self.audio_encoder(in_spec, pre_state, time_steps=time_steps) + + # hidden_states = in_spec + + feature, _ = self.audio_middle(feature, id=id) + + out = [] + + for i in range(self.decoder.__len__()): + mid = self.decoder[i](feature) + mid = self.final_out[i](mid) + out.append(mid) + + out = torch.cat(out, dim=1) + out = out.transpose(1, 2) + + return out, None + + diff --git a/nets/spg/s2glayers.py b/nets/spg/s2glayers.py new file mode 100644 index 0000000000000000000000000000000000000000..2a439e6bc0c4973586d39f3b113aa3752ff077fa --- /dev/null +++ b/nets/spg/s2glayers.py @@ -0,0 +1,522 @@ +''' +not exactly the same as the official repo but the results are good +''' +import sys +import os + +sys.path.append(os.getcwd()) + +import numpy as np +import torch +import torch.nn as nn +import torch.nn.functional as F +import math +from nets.layers import SeqEncoder1D, SeqTranslator1D + +""" from https://github.com/ai4r/Gesture-Generation-from-Trimodal-Context.git """ + + +class Conv2d_tf(nn.Conv2d): + """ + Conv2d with the padding behavior from TF + from https://github.com/mlperf/inference/blob/482f6a3beb7af2fb0bd2d91d6185d5e71c22c55f/others/edge/object_detection/ssd_mobilenet/pytorch/utils.py + """ + + def __init__(self, *args, **kwargs): + super(Conv2d_tf, self).__init__(*args, **kwargs) + self.padding = kwargs.get("padding", "SAME") + + def _compute_padding(self, input, dim): + input_size = input.size(dim + 2) + filter_size = self.weight.size(dim + 2) + effective_filter_size = (filter_size - 1) * self.dilation[dim] + 1 + out_size = (input_size + self.stride[dim] - 1) // self.stride[dim] + total_padding = max( + 0, (out_size - 1) * self.stride[dim] + effective_filter_size - input_size + ) + additional_padding = int(total_padding % 2 != 0) + + return additional_padding, total_padding + + def forward(self, input): + if self.padding == "VALID": + return F.conv2d( + input, + self.weight, + self.bias, + self.stride, + padding=0, + dilation=self.dilation, + groups=self.groups, + ) + rows_odd, padding_rows = self._compute_padding(input, dim=0) + cols_odd, padding_cols = self._compute_padding(input, dim=1) + if rows_odd or cols_odd: + input = F.pad(input, [0, cols_odd, 0, rows_odd]) + + return F.conv2d( + input, + self.weight, + self.bias, + self.stride, + padding=(padding_rows // 2, padding_cols // 2), + dilation=self.dilation, + groups=self.groups, + ) + + +class Conv1d_tf(nn.Conv1d): + """ + Conv1d with the padding behavior from TF + modified from https://github.com/mlperf/inference/blob/482f6a3beb7af2fb0bd2d91d6185d5e71c22c55f/others/edge/object_detection/ssd_mobilenet/pytorch/utils.py + """ + + def __init__(self, *args, **kwargs): + super(Conv1d_tf, self).__init__(*args, **kwargs) + self.padding = kwargs.get("padding") + + def _compute_padding(self, input, dim): + input_size = input.size(dim + 2) + filter_size = self.weight.size(dim + 2) + effective_filter_size = (filter_size - 1) * self.dilation[dim] + 1 + out_size = (input_size + self.stride[dim] - 1) // self.stride[dim] + total_padding = max( + 0, (out_size - 1) * self.stride[dim] + effective_filter_size - input_size + ) + additional_padding = int(total_padding % 2 != 0) + + return additional_padding, total_padding + + def forward(self, input): + # if self.padding == "valid": + # return F.conv1d( + # input, + # self.weight, + # self.bias, + # self.stride, + # padding=0, + # dilation=self.dilation, + # 
groups=self.groups, + # ) + rows_odd, padding_rows = self._compute_padding(input, dim=0) + if rows_odd: + input = F.pad(input, [0, rows_odd]) + + return F.conv1d( + input, + self.weight, + self.bias, + self.stride, + padding=(padding_rows // 2), + dilation=self.dilation, + groups=self.groups, + ) + + +def ConvNormRelu(in_channels, out_channels, type='1d', downsample=False, k=None, s=None, padding='valid', groups=1, + nonlinear='lrelu', bn='bn'): + if k is None and s is None: + if not downsample: + k = 3 + s = 1 + padding = 'same' + else: + k = 4 + s = 2 + padding = 'valid' + + if type == '1d': + conv_block = Conv1d_tf(in_channels, out_channels, kernel_size=k, stride=s, padding=padding, groups=groups) + norm_block = nn.BatchNorm1d(out_channels) + elif type == '2d': + conv_block = Conv2d_tf(in_channels, out_channels, kernel_size=k, stride=s, padding=padding, groups=groups) + norm_block = nn.BatchNorm2d(out_channels) + else: + assert False + if bn != 'bn': + if bn == 'gn': + norm_block = nn.GroupNorm(1, out_channels) + elif bn == 'ln': + norm_block = nn.LayerNorm(out_channels) + else: + norm_block = nn.Identity() + if nonlinear == 'lrelu': + nlinear = nn.LeakyReLU(0.2, True) + elif nonlinear == 'tanh': + nlinear = nn.Tanh() + elif nonlinear == 'none': + nlinear = nn.Identity() + + return nn.Sequential( + conv_block, + norm_block, + nlinear + ) + + +class UnetUp(nn.Module): + def __init__(self, in_ch, out_ch): + super(UnetUp, self).__init__() + self.conv = ConvNormRelu(in_ch, out_ch) + + def forward(self, x1, x2): + # x1 = torch.repeat_interleave(x1, 2, dim=2) + # x1 = x1[:, :, :x2.shape[2]] + x1 = torch.nn.functional.interpolate(x1, size=x2.shape[2], mode='linear') + x = x1 + x2 + x = self.conv(x) + return x + + +class UNet(nn.Module): + def __init__(self, input_dim, dim): + super(UNet, self).__init__() + # dim = 512 + self.down1 = nn.Sequential( + ConvNormRelu(input_dim, input_dim, '1d', False), + ConvNormRelu(input_dim, dim, '1d', False), + ConvNormRelu(dim, dim, '1d', False) + ) + self.gru = nn.GRU(dim, dim, 1, batch_first=True) + self.down2 = ConvNormRelu(dim, dim, '1d', True) + self.down3 = ConvNormRelu(dim, dim, '1d', True) + self.down4 = ConvNormRelu(dim, dim, '1d', True) + self.down5 = ConvNormRelu(dim, dim, '1d', True) + self.down6 = ConvNormRelu(dim, dim, '1d', True) + self.up1 = UnetUp(dim, dim) + self.up2 = UnetUp(dim, dim) + self.up3 = UnetUp(dim, dim) + self.up4 = UnetUp(dim, dim) + self.up5 = UnetUp(dim, dim) + + def forward(self, x1, pre_pose=None, w_pre=False): + x2_0 = self.down1(x1) + if w_pre: + i = 1 + x2_pre = self.gru(x2_0[:,:,0:i].permute(0,2,1), pre_pose[:,:,-1:].permute(2,0,1).contiguous())[0].permute(0,2,1) + x2 = torch.cat([x2_pre, x2_0[:,:,i:]], dim=-1) + # x2 = torch.cat([pre_pose, x2_0], dim=2) # [B, 512, 15] + else: + # x2 = self.gru(x2_0.transpose(1, 2))[0].transpose(1,2) + x2 = x2_0 + x3 = self.down2(x2) + x4 = self.down3(x3) + x5 = self.down4(x4) + x6 = self.down5(x5) + x7 = self.down6(x6) + x = self.up1(x7, x6) + x = self.up2(x, x5) + x = self.up3(x, x4) + x = self.up4(x, x3) + x = self.up5(x, x2) # [B, 512, 15] + return x, x2_0 + + +class AudioEncoder(nn.Module): + def __init__(self, n_frames, template_length, pose=False, common_dim=512): + super().__init__() + self.n_frames = n_frames + self.pose = pose + self.step = 0 + self.weight = 0 + if self.pose: + # self.first_net = nn.Sequential( + # ConvNormRelu(1, 64, '2d', False), + # ConvNormRelu(64, 64, '2d', True), + # ConvNormRelu(64, 128, '2d', False), + # ConvNormRelu(128, 128, '2d', True), + # 
ConvNormRelu(128, 256, '2d', False), + # ConvNormRelu(256, 256, '2d', True), + # ConvNormRelu(256, 256, '2d', False), + # ConvNormRelu(256, 256, '2d', False, padding='VALID') + # ) + # decoder_layer = nn.TransformerDecoderLayer(d_model=args.feature_dim, nhead=4, + # dim_feedforward=2 * args.feature_dim, batch_first=True) + # a = nn.TransformerDecoder + self.first_net = SeqTranslator1D(256, 256, + min_layers_num=4, + residual=True + ) + self.dropout_0 = nn.Dropout(0.1) + self.mu_fc = nn.Conv1d(256, 128, 1, 1) + self.var_fc = nn.Conv1d(256, 128, 1, 1) + self.trans_motion = SeqTranslator1D(common_dim, common_dim, + kernel_size=1, + stride=1, + min_layers_num=3, + residual=True + ) + # self.att = nn.MultiheadAttention(64 + template_length, 4, dropout=0.1) + self.unet = UNet(128 + template_length, common_dim) + + else: + self.first_net = SeqTranslator1D(256, 256, + min_layers_num=4, + residual=True + ) + self.dropout_0 = nn.Dropout(0.1) + # self.att = nn.MultiheadAttention(256, 4, dropout=0.1) + self.unet = UNet(256, 256) + self.dropout_1 = nn.Dropout(0.0) + + def forward(self, spectrogram, time_steps=None, template=None, pre_pose=None, w_pre=False): + self.step = self.step + 1 + if self.pose: + spect = spectrogram.transpose(1, 2) + if w_pre: + spect = spect[:, :, :] + + out = self.first_net(spect) + out = self.dropout_0(out) + + mu = self.mu_fc(out) + var = self.var_fc(out) + audio = self.__reparam(mu, var) + # audio = out + + # template = self.trans_motion(template) + x1 = torch.cat([audio, template], dim=1)#.permute(2,0,1) + # x1 = out + #x1, _ = self.att(x1, x1, x1) + #x1 = x1.permute(1,2,0) + x1, x2_0 = self.unet(x1, pre_pose=pre_pose, w_pre=w_pre) + else: + spectrogram = spectrogram.transpose(1, 2) + x1 = self.first_net(spectrogram)#.permute(2,0,1) + #out, _ = self.att(out, out, out) + #out = out.permute(1, 2, 0) + x1 = self.dropout_0(x1) + x1, x2_0 = self.unet(x1) + x1 = self.dropout_1(x1) + mu = None + var = None + + return x1, (mu, var), x2_0 + + def __reparam(self, mu, log_var): + std = torch.exp(0.5 * log_var) + eps = torch.randn_like(std, device='cuda') + z = eps * std + mu + return z + + +class Generator(nn.Module): + def __init__(self, + n_poses, + pose_dim, + pose, + n_pre_poses, + each_dim: list, + dim_list: list, + use_template=False, + template_length=0, + training=False, + device=None, + separate=False, + expression=False + ): + super().__init__() + + self.use_template = use_template + self.template_length = template_length + self.training = training + self.device = device + self.separate = separate + self.pose = pose + self.decoderf = True + self.expression = expression + + common_dim = 256 + + if self.use_template: + assert template_length > 0 + # self.KLLoss = KLLoss(kl_tolerance=self.config.Train.weights.kl_tolerance).to(self.device) + # self.pose_encoder = SeqEncoder1D( + # C_in=pose_dim, + # C_out=512, + # T_in=n_poses, + # min_layer_nums=6 + # + # ) + self.pose_encoder = SeqTranslator1D(pose_dim - 50, common_dim, + # kernel_size=1, + # stride=1, + min_layers_num=3, + residual=True + ) + self.mu_fc = nn.Conv1d(common_dim, template_length, kernel_size=1, stride=1) + self.var_fc = nn.Conv1d(common_dim, template_length, kernel_size=1, stride=1) + + else: + self.template_length = 0 + + self.gen_length = n_poses + + self.audio_encoder = AudioEncoder(n_poses, template_length, True, common_dim) + self.speech_encoder = AudioEncoder(n_poses, template_length, False) + + # self.pre_pose_encoder = SeqEncoder1D( + # C_in=pose_dim, + # C_out=128, + # T_in=15, + # min_layer_nums=3 + # 
+ # ) + # self.pmu_fc = nn.Linear(128, 64) + # self.pvar_fc = nn.Linear(128, 64) + + self.pre_pose_encoder = SeqTranslator1D(pose_dim-50, common_dim, + min_layers_num=5, + residual=True + ) + self.decoder_in = 256 + 64 + self.dim_list = dim_list + + if self.separate: + self.decoder = nn.ModuleList() + self.final_out = nn.ModuleList() + + self.decoder.append(nn.Sequential( + ConvNormRelu(256, 64), + ConvNormRelu(64, 64), + ConvNormRelu(64, 64), + )) + self.final_out.append(nn.Conv1d(64, each_dim[0], 1, 1)) + + self.decoder.append(nn.Sequential( + ConvNormRelu(common_dim, common_dim), + ConvNormRelu(common_dim, common_dim), + ConvNormRelu(common_dim, common_dim), + )) + self.final_out.append(nn.Conv1d(common_dim, each_dim[1], 1, 1)) + + self.decoder.append(nn.Sequential( + ConvNormRelu(common_dim, common_dim), + ConvNormRelu(common_dim, common_dim), + ConvNormRelu(common_dim, common_dim), + )) + self.final_out.append(nn.Conv1d(common_dim, each_dim[2], 1, 1)) + + if self.expression: + self.decoder.append(nn.Sequential( + ConvNormRelu(256, 256), + ConvNormRelu(256, 256), + ConvNormRelu(256, 256), + )) + self.final_out.append(nn.Conv1d(256, each_dim[3], 1, 1)) + else: + self.decoder = nn.Sequential( + ConvNormRelu(self.decoder_in, 512), + ConvNormRelu(512, 512), + ConvNormRelu(512, 512), + ConvNormRelu(512, 512), + ConvNormRelu(512, 512), + ConvNormRelu(512, 512), + ) + self.final_out = nn.Conv1d(512, pose_dim, 1, 1) + + def __reparam(self, mu, log_var): + std = torch.exp(0.5 * log_var) + eps = torch.randn_like(std, device=self.device) + z = eps * std + mu + return z + + def forward(self, in_spec, pre_poses, gt_poses, template=None, time_steps=None, w_pre=False, norm=True): + if time_steps is not None: + self.gen_length = time_steps + + if self.use_template: + if self.training: + if w_pre: + in_spec = in_spec[:, 15:, :] + pre_pose = self.pre_pose_encoder(gt_poses[:, 14:15, :-50].permute(0, 2, 1)) + pose_enc = self.pose_encoder(gt_poses[:, 15:, :-50].permute(0, 2, 1)) + mu = self.mu_fc(pose_enc) + var = self.var_fc(pose_enc) + template = self.__reparam(mu, var) + else: + pre_pose = None + pose_enc = self.pose_encoder(gt_poses[:, :, :-50].permute(0, 2, 1)) + mu = self.mu_fc(pose_enc) + var = self.var_fc(pose_enc) + template = self.__reparam(mu, var) + elif pre_poses is not None: + if w_pre: + pre_pose = pre_poses[:, -1:, :-50] + if norm: + pre_pose = pre_pose.reshape(1, -1, 55, 5) + pre_pose = torch.cat([F.normalize(pre_pose[..., :3], dim=-1), + F.normalize(pre_pose[..., 3:5], dim=-1)], + dim=-1).reshape(1, -1, 275) + pre_pose = self.pre_pose_encoder(pre_pose.permute(0, 2, 1)) + template = torch.randn([in_spec.shape[0], self.template_length, self.gen_length ]).to( + in_spec.device) + else: + pre_pose = None + template = torch.randn([in_spec.shape[0], self.template_length, self.gen_length]).to(in_spec.device) + elif gt_poses is not None: + template = self.pre_pose_encoder(gt_poses[:, :, :-50].permute(0, 2, 1)) + elif template is None: + pre_pose = None + template = torch.randn([in_spec.shape[0], self.template_length, self.gen_length]).to(in_spec.device) + else: + template = None + mu = None + var = None + + a_t_f, (mu2, var2), x2_0 = self.audio_encoder(in_spec, time_steps=time_steps, template=template, pre_pose=pre_pose, w_pre=w_pre) + s_f, _, _ = self.speech_encoder(in_spec, time_steps=time_steps) + + out = [] + + if self.separate: + for i in range(self.decoder.__len__()): + if i == 0 or i == 3: + mid = self.decoder[i](s_f) + else: + mid = self.decoder[i](a_t_f) + mid = self.final_out[i](mid) + 
out.append(mid) + out = torch.cat(out, dim=1) + + else: + out = self.decoder(a_t_f) + out = self.final_out(out) + + out = out.transpose(1, 2) + + if self.training: + if w_pre: + return out, template, mu, var, (mu2, var2, x2_0, pre_pose) + else: + return out, template, mu, var, (mu2, var2, None, None) + else: + return out + + +class Discriminator(nn.Module): + def __init__(self, pose_dim, pose): + super().__init__() + self.net = nn.Sequential( + Conv1d_tf(pose_dim, 64, kernel_size=4, stride=2, padding='SAME'), + nn.LeakyReLU(0.2, True), + ConvNormRelu(64, 128, '1d', True), + ConvNormRelu(128, 256, '1d', k=4, s=1), + Conv1d_tf(256, 1, kernel_size=4, stride=1, padding='SAME'), + ) + + def forward(self, x): + x = x.transpose(1, 2) + + out = self.net(x) + return out + + +def main(): + d = Discriminator(275, 55) + x = torch.randn([8, 60, 275]) + result = d(x) + + +if __name__ == "__main__": + main() diff --git a/nets/spg/vqvae_1d.py b/nets/spg/vqvae_1d.py new file mode 100644 index 0000000000000000000000000000000000000000..0cd15bd6439b949bf89098af274b3e7ccac9b5f5 --- /dev/null +++ b/nets/spg/vqvae_1d.py @@ -0,0 +1,235 @@ +import os +import numpy as np +import torch +import torch.nn as nn +import torch.nn.functional as F +from .wav2vec import Wav2Vec2Model +from .vqvae_modules import VectorQuantizerEMA, ConvNormRelu, Res_CNR_Stack + + + +class AudioEncoder(nn.Module): + def __init__(self, in_dim, num_hiddens, num_residual_layers, num_residual_hiddens): + super(AudioEncoder, self).__init__() + self._num_hiddens = num_hiddens + self._num_residual_layers = num_residual_layers + self._num_residual_hiddens = num_residual_hiddens + + self.project = ConvNormRelu(in_dim, self._num_hiddens // 4, leaky=True) + + self._enc_1 = Res_CNR_Stack(self._num_hiddens // 4, self._num_residual_layers, leaky=True) + self._down_1 = ConvNormRelu(self._num_hiddens // 4, self._num_hiddens // 2, leaky=True, residual=True, + sample='down') + self._enc_2 = Res_CNR_Stack(self._num_hiddens // 2, self._num_residual_layers, leaky=True) + self._down_2 = ConvNormRelu(self._num_hiddens // 2, self._num_hiddens, leaky=True, residual=True, sample='down') + self._enc_3 = Res_CNR_Stack(self._num_hiddens, self._num_residual_layers, leaky=True) + + def forward(self, x, frame_num=0): + h = self.project(x) + h = self._enc_1(h) + h = self._down_1(h) + h = self._enc_2(h) + h = self._down_2(h) + h = self._enc_3(h) + return h + + +class Wav2VecEncoder(nn.Module): + def __init__(self, num_hiddens, num_residual_layers): + super(Wav2VecEncoder, self).__init__() + self._num_hiddens = num_hiddens + self._num_residual_layers = num_residual_layers + + self.audio_encoder = Wav2Vec2Model.from_pretrained( + "facebook/wav2vec2-base-960h") # "vitouphy/wav2vec2-xls-r-300m-phoneme""facebook/wav2vec2-base-960h" + self.audio_encoder.feature_extractor._freeze_parameters() + + self.project = ConvNormRelu(768, self._num_hiddens, leaky=True) + + self._enc_1 = Res_CNR_Stack(self._num_hiddens, self._num_residual_layers, leaky=True) + self._down_1 = ConvNormRelu(self._num_hiddens, self._num_hiddens, leaky=True, residual=True, sample='down') + self._enc_2 = Res_CNR_Stack(self._num_hiddens, self._num_residual_layers, leaky=True) + self._down_2 = ConvNormRelu(self._num_hiddens, self._num_hiddens, leaky=True, residual=True, sample='down') + self._enc_3 = Res_CNR_Stack(self._num_hiddens, self._num_residual_layers, leaky=True) + + def forward(self, x, frame_num): + h = self.audio_encoder(x.squeeze(), frame_num=frame_num).last_hidden_state.transpose(1, 2) + h = 
self.project(h) + h = self._enc_1(h) + h = self._down_1(h) + h = self._enc_2(h) + h = self._down_2(h) + h = self._enc_3(h) + return h + + +class Encoder(nn.Module): + def __init__(self, in_dim, embedding_dim, num_hiddens, num_residual_layers, num_residual_hiddens): + super(Encoder, self).__init__() + self._num_hiddens = num_hiddens + self._num_residual_layers = num_residual_layers + self._num_residual_hiddens = num_residual_hiddens + + self.project = ConvNormRelu(in_dim, self._num_hiddens // 4, leaky=True) + + self._enc_1 = Res_CNR_Stack(self._num_hiddens // 4, self._num_residual_layers, leaky=True) + self._down_1 = ConvNormRelu(self._num_hiddens // 4, self._num_hiddens // 2, leaky=True, residual=True, + sample='down') + self._enc_2 = Res_CNR_Stack(self._num_hiddens // 2, self._num_residual_layers, leaky=True) + self._down_2 = ConvNormRelu(self._num_hiddens // 2, self._num_hiddens, leaky=True, residual=True, sample='down') + self._enc_3 = Res_CNR_Stack(self._num_hiddens, self._num_residual_layers, leaky=True) + + self.pre_vq_conv = nn.Conv1d(self._num_hiddens, embedding_dim, 1, 1) + + def forward(self, x): + h = self.project(x) + h = self._enc_1(h) + h = self._down_1(h) + h = self._enc_2(h) + h = self._down_2(h) + h = self._enc_3(h) + h = self.pre_vq_conv(h) + return h + + +class Frame_Enc(nn.Module): + def __init__(self, in_dim, num_hiddens): + super(Frame_Enc, self).__init__() + self.in_dim = in_dim + self.num_hiddens = num_hiddens + + # self.enc = transformer_Enc(in_dim, num_hiddens, 2, 8, 256, 256, 256, 256, 0, dropout=0.1, n_position=4) + self.proj = nn.Conv1d(in_dim, num_hiddens, 1, 1) + self.enc = Res_CNR_Stack(num_hiddens, 2, leaky=True) + self.proj_1 = nn.Conv1d(256*4, num_hiddens, 1, 1) + self.proj_2 = nn.Conv1d(256*4, num_hiddens*2, 1, 1) + + def forward(self, x): + # x = self.enc(x, None)[0].reshape(x.shape[0], -1, 1) + x = self.enc(self.proj(x)).reshape(x.shape[0], -1, 1) + second_last = self.proj_2(x) + last = self.proj_1(x) + return second_last, last + + + +class Decoder(nn.Module): + def __init__(self, out_dim, embedding_dim, num_hiddens, num_residual_layers, num_residual_hiddens, ae=False): + super(Decoder, self).__init__() + self._num_hiddens = num_hiddens + self._num_residual_layers = num_residual_layers + self._num_residual_hiddens = num_residual_hiddens + + self.aft_vq_conv = nn.Conv1d(embedding_dim, self._num_hiddens, 1, 1) + + self._dec_1 = Res_CNR_Stack(self._num_hiddens, self._num_residual_layers, leaky=True) + self._up_2 = ConvNormRelu(self._num_hiddens, self._num_hiddens // 2, leaky=True, residual=True, sample='up') + self._dec_2 = Res_CNR_Stack(self._num_hiddens // 2, self._num_residual_layers, leaky=True) + self._up_3 = ConvNormRelu(self._num_hiddens // 2, self._num_hiddens // 4, leaky=True, residual=True, + sample='up') + self._dec_3 = Res_CNR_Stack(self._num_hiddens // 4, self._num_residual_layers, leaky=True) + + if ae: + self.frame_enc = Frame_Enc(out_dim, self._num_hiddens // 4) + self.gru_sl = nn.GRU(self._num_hiddens // 2, self._num_hiddens // 2, 1, batch_first=True) + self.gru_l = nn.GRU(self._num_hiddens // 4, self._num_hiddens // 4, 1, batch_first=True) + + self.project = nn.Conv1d(self._num_hiddens // 4, out_dim, 1, 1) + + def forward(self, h, last_frame=None): + + h = self.aft_vq_conv(h) + h = self._dec_1(h) + h = self._up_2(h) + h = self._dec_2(h) + h = self._up_3(h) + h = self._dec_3(h) + + recon = self.project(h) + return recon, None + + +class Pre_VQ(nn.Module): + def __init__(self, num_hiddens, embedding_dim, num_chunks): + super(Pre_VQ, 
self).__init__() + self.conv = nn.Conv1d(num_hiddens, num_hiddens, 1, 1, 0, groups=num_chunks) + self.bn = nn.GroupNorm(num_chunks, num_hiddens) + self.relu = nn.ReLU() + self.proj = nn.Conv1d(num_hiddens, embedding_dim, 1, 1, 0, groups=num_chunks) + + def forward(self, x): + x = self.conv(x) + x = self.bn(x) + x = self.relu(x) + x = self.proj(x) + return x + + +class VQVAE(nn.Module): + """VQ-VAE""" + + def __init__(self, in_dim, embedding_dim, num_embeddings, + num_hiddens, num_residual_layers, num_residual_hiddens, + commitment_cost=0.25, decay=0.99, share=False): + super().__init__() + self.in_dim = in_dim + self.embedding_dim = embedding_dim + self.num_embeddings = num_embeddings + self.share_code_vq = share + + self.encoder = Encoder(in_dim, embedding_dim, num_hiddens, num_residual_layers, num_residual_hiddens) + self.vq_layer = VectorQuantizerEMA(embedding_dim, num_embeddings, commitment_cost, decay) + self.decoder = Decoder(in_dim, embedding_dim, num_hiddens, num_residual_layers, num_residual_hiddens) + + def forward(self, gt_poses, id=None, pre_state=None): + z = self.encoder(gt_poses.transpose(1, 2)) + if not self.training: + e, _ = self.vq_layer(z) + x_recon, cur_state = self.decoder(e, pre_state.transpose(1, 2) if pre_state is not None else None) + return e, x_recon + + e, e_q_loss = self.vq_layer(z) + gt_recon, cur_state = self.decoder(e, pre_state.transpose(1, 2) if pre_state is not None else None) + + return e_q_loss, gt_recon.transpose(1, 2) + + def encode(self, gt_poses, id=None): + z = self.encoder(gt_poses.transpose(1, 2)) + e, latents = self.vq_layer(z) + return e, latents + + def decode(self, b, w, e=None, latents=None, pre_state=None): + if e is not None: + x = self.decoder(e, pre_state.transpose(1, 2) if pre_state is not None else None) + else: + e = self.vq_layer.quantize(latents) + e = e.view(b, w, -1).permute(0, 2, 1).contiguous() + x = self.decoder(e, pre_state.transpose(1, 2) if pre_state is not None else None) + return x + + +class AE(nn.Module): + """VQ-VAE""" + + def __init__(self, in_dim, embedding_dim, num_embeddings, + num_hiddens, num_residual_layers, num_residual_hiddens): + super().__init__() + self.in_dim = in_dim + self.embedding_dim = embedding_dim + self.num_embeddings = num_embeddings + + self.encoder = Encoder(in_dim, embedding_dim, num_hiddens, num_residual_layers, num_residual_hiddens) + self.decoder = Decoder(in_dim, embedding_dim, num_hiddens, num_residual_layers, num_residual_hiddens, True) + + def forward(self, gt_poses, id=None, pre_state=None): + z = self.encoder(gt_poses.transpose(1, 2)) + if not self.training: + x_recon, cur_state = self.decoder(z, pre_state.transpose(1, 2) if pre_state is not None else None) + return z, x_recon + gt_recon, cur_state = self.decoder(z, pre_state.transpose(1, 2) if pre_state is not None else None) + + return gt_recon.transpose(1, 2) + + def encode(self, gt_poses, id=None): + z = self.encoder(gt_poses.transpose(1, 2)) + return z diff --git a/nets/spg/vqvae_modules.py b/nets/spg/vqvae_modules.py new file mode 100644 index 0000000000000000000000000000000000000000..1b4de974514bb21691b16c4457b3111ec3713e84 --- /dev/null +++ b/nets/spg/vqvae_modules.py @@ -0,0 +1,378 @@ +import os +import numpy as np +import torch +import torch.nn as nn +import torch.nn.functional as F +from torchvision import datasets, transforms +import matplotlib.pyplot as plt + + +class CasualCT(nn.Module): + def __init__(self, + in_channels, + out_channels, + leaky=False, + p=0, + groups=1, ): + ''' + conv-bn-relu + ''' + super(CasualCT, 
self).__init__() + padding = 0 + kernel_size = 2 + stride = 2 + in_channels = in_channels * groups + out_channels = out_channels * groups + + self.conv = nn.ConvTranspose1d(in_channels=in_channels, out_channels=out_channels, + kernel_size=kernel_size, stride=stride, padding=padding, + groups=groups) + self.norm = nn.BatchNorm1d(out_channels) + self.dropout = nn.Dropout(p=p) + if leaky: + self.relu = nn.LeakyReLU(negative_slope=0.2) + else: + self.relu = nn.ReLU() + + def forward(self, x, **kwargs): + out = self.norm(self.dropout(self.conv(x))) + return self.relu(out) + + +class CasualConv(nn.Module): + def __init__(self, + in_channels, + out_channels, + leaky=False, + p=0, + groups=1, + downsample=False): + ''' + conv-bn-relu + ''' + super(CasualConv, self).__init__() + padding = 0 + kernel_size = 2 + stride = 1 + self.downsample = downsample + if self.downsample: + kernel_size = 2 + stride = 2 + + in_channels = in_channels * groups + out_channels = out_channels * groups + self.conv = nn.Conv1d(in_channels=in_channels, out_channels=out_channels, + kernel_size=kernel_size, stride=stride, padding=padding, + groups=groups) + self.norm = nn.BatchNorm1d(out_channels) + self.dropout = nn.Dropout(p=p) + if leaky: + self.relu = nn.LeakyReLU(negative_slope=0.2) + else: + self.relu = nn.ReLU() + + def forward(self, x, pre_state=None): + if not self.downsample: + if pre_state is not None: + x = torch.cat([pre_state, x], dim=-1) + else: + zeros = torch.zeros([x.shape[0], x.shape[1], 1], device=x.device) + x = torch.cat([zeros, x], dim=-1) + out = self.norm(self.dropout(self.conv(x))) + return self.relu(out) + + +class ConvNormRelu(nn.Module): + ''' + (B,C_in,H,W) -> (B, C_out, H, W) + there exist some kernel size that makes the result is not H/s + #TODO: there might some problems with residual + ''' + + def __init__(self, + in_channels, + out_channels, + leaky=False, + sample='none', + p=0, + groups=1, + residual=False, + norm='bn'): + ''' + conv-bn-relu + ''' + super(ConvNormRelu, self).__init__() + self.residual = residual + self.norm_type = norm + padding = 1 + + if sample == 'none': + kernel_size = 3 + stride = 1 + elif sample == 'one': + padding = 0 + kernel_size = stride = 1 + else: + kernel_size = 4 + stride = 2 + + if self.residual: + if sample == 'down': + self.residual_layer = nn.Conv1d( + in_channels=in_channels, + out_channels=out_channels, + kernel_size=kernel_size, + stride=stride, + padding=padding) + elif sample == 'up': + self.residual_layer = nn.ConvTranspose1d( + in_channels=in_channels, + out_channels=out_channels, + kernel_size=kernel_size, + stride=stride, + padding=padding) + else: + if in_channels == out_channels: + self.residual_layer = nn.Identity() + else: + self.residual_layer = nn.Sequential( + nn.Conv1d( + in_channels=in_channels, + out_channels=out_channels, + kernel_size=kernel_size, + stride=stride, + padding=padding + ) + ) + + in_channels = in_channels * groups + out_channels = out_channels * groups + if sample == 'up': + self.conv = nn.ConvTranspose1d(in_channels=in_channels, out_channels=out_channels, + kernel_size=kernel_size, stride=stride, padding=padding, + groups=groups) + else: + self.conv = nn.Conv1d(in_channels=in_channels, out_channels=out_channels, + kernel_size=kernel_size, stride=stride, padding=padding, + groups=groups) + self.norm = nn.BatchNorm1d(out_channels) + self.dropout = nn.Dropout(p=p) + if leaky: + self.relu = nn.LeakyReLU(negative_slope=0.2) + else: + self.relu = nn.ReLU() + + def forward(self, x, **kwargs): + out = 
self.norm(self.dropout(self.conv(x))) + if self.residual: + residual = self.residual_layer(x) + out += residual + return self.relu(out) + + +class Res_CNR_Stack(nn.Module): + def __init__(self, + channels, + layers, + sample='none', + leaky=False, + casual=False, + ): + super(Res_CNR_Stack, self).__init__() + + if casual: + kernal_size = 1 + padding = 0 + conv = CasualConv + else: + kernal_size = 3 + padding = 1 + conv = ConvNormRelu + + if sample == 'one': + kernal_size = 1 + padding = 0 + + self._layers = nn.ModuleList() + for i in range(layers): + self._layers.append(conv(channels, channels, leaky=leaky, sample=sample)) + self.conv = nn.Conv1d(channels, channels, kernal_size, 1, padding) + self.norm = nn.BatchNorm1d(channels) + self.relu = nn.ReLU() + + def forward(self, x, pre_state=None): + # cur_state = [] + h = x + for i in range(self._layers.__len__()): + # cur_state.append(h[..., -1:]) + h = self._layers[i](h, pre_state=pre_state[i] if pre_state is not None else None) + h = self.norm(self.conv(h)) + return self.relu(h + x) + + +class ExponentialMovingAverage(nn.Module): + """Maintains an exponential moving average for a value. + + This module keeps track of a hidden exponential moving average that is + initialized as a vector of zeros which is then normalized to give the average. + This gives us a moving average which isn't biased towards either zero or the + initial value. Reference (https://arxiv.org/pdf/1412.6980.pdf) + + Initially: + hidden_0 = 0 + Then iteratively: + hidden_i = hidden_{i-1} - (hidden_{i-1} - value) * (1 - decay) + average_i = hidden_i / (1 - decay^i) + """ + + def __init__(self, init_value, decay): + super().__init__() + + self.decay = decay + self.counter = 0 + self.register_buffer("hidden", torch.zeros_like(init_value)) + + def forward(self, value): + self.counter += 1 + self.hidden.sub_((self.hidden - value) * (1 - self.decay)) + average = self.hidden / (1 - self.decay ** self.counter) + return average + + +class VectorQuantizerEMA(nn.Module): + """ + VQ-VAE layer: Input any tensor to be quantized. Use EMA to update embeddings. + Args: + embedding_dim (int): the dimensionality of the tensors in the + quantized space. Inputs to the modules must be in this format as well. + num_embeddings (int): the number of vectors in the quantized space. + commitment_cost (float): scalar which controls the weighting of the loss terms (see + equation 4 in the paper - this variable is Beta). + decay (float): decay for the moving averages. + epsilon (float): small float constant to avoid numerical instability. 
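+ 
+ The codebook is kept as a buffer (no gradients flow to it); it is updated from exponential
+ moving averages of the encoder outputs assigned to each code, with Laplace smoothing of the
+ cluster sizes, while the encoder itself is trained only through the commitment loss and the
+ straight-through estimator.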
+ """ + + def __init__(self, embedding_dim, num_embeddings, commitment_cost, decay, + epsilon=1e-5): + super().__init__() + self.embedding_dim = embedding_dim + self.num_embeddings = num_embeddings + self.commitment_cost = commitment_cost + self.epsilon = epsilon + + # initialize embeddings as buffers + embeddings = torch.empty(self.num_embeddings, self.embedding_dim) + nn.init.xavier_uniform_(embeddings) + self.register_buffer("embeddings", embeddings) + self.ema_dw = ExponentialMovingAverage(self.embeddings, decay) + + # also maintain ema_cluster_size, which record the size of each embedding + self.ema_cluster_size = ExponentialMovingAverage(torch.zeros((self.num_embeddings,)), decay) + + def forward(self, x): + # [B, C, H, W] -> [B, H, W, C] + x = x.permute(0, 2, 1).contiguous() + # [B, H, W, C] -> [BHW, C] + flat_x = x.reshape(-1, self.embedding_dim) + + encoding_indices = self.get_code_indices(flat_x) + quantized = self.quantize(encoding_indices) + quantized = quantized.view_as(x) # [B, W, C] + + if not self.training: + quantized = quantized.permute(0, 2, 1).contiguous() + return quantized, encoding_indices.view(quantized.shape[0], quantized.shape[2]) + + # update embeddings with EMA + with torch.no_grad(): + encodings = F.one_hot(encoding_indices, self.num_embeddings).float() + updated_ema_cluster_size = self.ema_cluster_size(torch.sum(encodings, dim=0)) + n = torch.sum(updated_ema_cluster_size) + updated_ema_cluster_size = ((updated_ema_cluster_size + self.epsilon) / + (n + self.num_embeddings * self.epsilon) * n) + dw = torch.matmul(encodings.t(), flat_x) # sum encoding vectors of each cluster + updated_ema_dw = self.ema_dw(dw) + normalised_updated_ema_w = ( + updated_ema_dw / updated_ema_cluster_size.reshape(-1, 1)) + self.embeddings.data = normalised_updated_ema_w + + # commitment loss + e_latent_loss = F.mse_loss(x, quantized.detach()) + loss = self.commitment_cost * e_latent_loss + + # Straight Through Estimator + quantized = x + (quantized - x).detach() + + quantized = quantized.permute(0, 2, 1).contiguous() + return quantized, loss + + def get_code_indices(self, flat_x): + # compute L2 distance + distances = ( + torch.sum(flat_x ** 2, dim=1, keepdim=True) + + torch.sum(self.embeddings ** 2, dim=1) - + 2. 
* torch.matmul(flat_x, self.embeddings.t()) + ) # [N, M] + encoding_indices = torch.argmin(distances, dim=1) # [N,] + return encoding_indices + + def quantize(self, encoding_indices): + """Returns embedding tensor for a batch of indices.""" + return F.embedding(encoding_indices, self.embeddings) + + + +class Casual_Encoder(nn.Module): + def __init__(self, in_dim, embedding_dim, num_hiddens, num_residual_layers, num_residual_hiddens): + super(Casual_Encoder, self).__init__() + self._num_hiddens = num_hiddens + self._num_residual_layers = num_residual_layers + self._num_residual_hiddens = num_residual_hiddens + + self.project = nn.Conv1d(in_dim, self._num_hiddens // 4, 1, 1) + self._enc_1 = Res_CNR_Stack(self._num_hiddens // 4, self._num_residual_layers, leaky=True, casual=True) + self._down_1 = CasualConv(self._num_hiddens // 4, self._num_hiddens // 2, leaky=True, downsample=True) + self._enc_2 = Res_CNR_Stack(self._num_hiddens // 2, self._num_residual_layers, leaky=True, casual=True) + self._down_2 = CasualConv(self._num_hiddens // 2, self._num_hiddens, leaky=True, downsample=True) + self._enc_3 = Res_CNR_Stack(self._num_hiddens, self._num_residual_layers, leaky=True, casual=True) + # self.pre_vq_conv = nn.Conv1d(self._num_hiddens, embedding_dim, 1, 1) + + def forward(self, x): + h = self.project(x) + h, _ = self._enc_1(h) + h = self._down_1(h) + h, _ = self._enc_2(h) + h = self._down_2(h) + h, _ = self._enc_3(h) + # h = self.pre_vq_conv(h) + return h + + +class Casual_Decoder(nn.Module): + def __init__(self, out_dim, embedding_dim, num_hiddens, num_residual_layers, num_residual_hiddens): + super(Casual_Decoder, self).__init__() + self._num_hiddens = num_hiddens + self._num_residual_layers = num_residual_layers + self._num_residual_hiddens = num_residual_hiddens + + # self.aft_vq_conv = nn.Conv1d(embedding_dim, self._num_hiddens, 1, 1) + self._dec_1 = Res_CNR_Stack(self._num_hiddens, self._num_residual_layers, leaky=True, casual=True) + self._up_2 = CasualCT(self._num_hiddens, self._num_hiddens // 2, leaky=True) + self._dec_2 = Res_CNR_Stack(self._num_hiddens // 2, self._num_residual_layers, leaky=True, casual=True) + self._up_3 = CasualCT(self._num_hiddens // 2, self._num_hiddens // 4, leaky=True) + self._dec_3 = Res_CNR_Stack(self._num_hiddens // 4, self._num_residual_layers, leaky=True, casual=True) + self.project = nn.Conv1d(self._num_hiddens//4, out_dim, 1, 1) + + def forward(self, h, pre_state=None): + cur_state = [] + # h = self.aft_vq_conv(x) + h, s = self._dec_1(h, pre_state[0] if pre_state is not None else None) + cur_state.append(s) + h = self._up_2(h) + h, s = self._dec_2(h, pre_state[1] if pre_state is not None else None) + cur_state.append(s) + h = self._up_3(h) + h, s = self._dec_3(h, pre_state[2] if pre_state is not None else None) + cur_state.append(s) + recon = self.project(h) + return recon, cur_state \ No newline at end of file diff --git a/nets/spg/wav2vec.py b/nets/spg/wav2vec.py new file mode 100644 index 0000000000000000000000000000000000000000..a5d0eff66e67de14ceba283fa6ce43f156c7ddc2 --- /dev/null +++ b/nets/spg/wav2vec.py @@ -0,0 +1,143 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F +import numpy as np +import copy +import math +from transformers import Wav2Vec2Model,Wav2Vec2Config +from transformers.modeling_outputs import BaseModelOutput +from typing import Optional, Tuple +_CONFIG_FOR_DOC = "Wav2Vec2Config" + +# the implementation of Wav2Vec2Model is borrowed from 
https://huggingface.co/transformers/_modules/transformers/models/wav2vec2/modeling_wav2vec2.html#Wav2Vec2Model +# initialize our encoder with the pre-trained wav2vec 2.0 weights. +def _compute_mask_indices( + shape: Tuple[int, int], + mask_prob: float, + mask_length: int, + attention_mask: Optional[torch.Tensor] = None, + min_masks: int = 0, +) -> np.ndarray: + bsz, all_sz = shape + mask = np.full((bsz, all_sz), False) + + all_num_mask = int( + mask_prob * all_sz / float(mask_length) + + np.random.rand() + ) + all_num_mask = max(min_masks, all_num_mask) + mask_idcs = [] + padding_mask = attention_mask.ne(1) if attention_mask is not None else None + for i in range(bsz): + if padding_mask is not None: + sz = all_sz - padding_mask[i].long().sum().item() + num_mask = int( + mask_prob * sz / float(mask_length) + + np.random.rand() + ) + num_mask = max(min_masks, num_mask) + else: + sz = all_sz + num_mask = all_num_mask + + lengths = np.full(num_mask, mask_length) + + if sum(lengths) == 0: + lengths[0] = min(mask_length, sz - 1) + + min_len = min(lengths) + if sz - min_len <= num_mask: + min_len = sz - num_mask - 1 + + mask_idc = np.random.choice(sz - min_len, num_mask, replace=False) + mask_idc = np.asarray([mask_idc[j] + offset for j in range(len(mask_idc)) for offset in range(lengths[j])]) + mask_idcs.append(np.unique(mask_idc[mask_idc < sz])) + + min_len = min([len(m) for m in mask_idcs]) + for i, mask_idc in enumerate(mask_idcs): + if len(mask_idc) > min_len: + mask_idc = np.random.choice(mask_idc, min_len, replace=False) + mask[i, mask_idc] = True + return mask + +# linear interpolation layer +def linear_interpolation(features, input_fps, output_fps, output_len=None): + features = features.transpose(1, 2) + seq_len = features.shape[2] / float(input_fps) + if output_len is None: + output_len = int(seq_len * output_fps) + output_features = F.interpolate(features,size=output_len,align_corners=False,mode='linear') + return output_features.transpose(1, 2) + + +class Wav2Vec2Model(Wav2Vec2Model): + def __init__(self, config): + super().__init__(config) + def forward( + self, + input_values, + attention_mask=None, + output_attentions=None, + output_hidden_states=None, + return_dict=None, + frame_num=None + ): + self.config.output_attentions = True + output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions + output_hidden_states = ( + output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states + ) + return_dict = return_dict if return_dict is not None else self.config.use_return_dict + + hidden_states = self.feature_extractor(input_values) + hidden_states = hidden_states.transpose(1, 2) + + hidden_states = linear_interpolation(hidden_states, 50, 30,output_len=frame_num) + + if attention_mask is not None: + output_lengths = self._get_feat_extract_output_lengths(attention_mask.sum(-1)) + attention_mask = torch.zeros( + hidden_states.shape[:2], dtype=hidden_states.dtype, device=hidden_states.device + ) + attention_mask[ + (torch.arange(attention_mask.shape[0], device=hidden_states.device), output_lengths - 1) + ] = 1 + attention_mask = attention_mask.flip([-1]).cumsum(-1).flip([-1]).bool() + + hidden_states = self.feature_projection(hidden_states) + + if self.config.apply_spec_augment and self.training: + batch_size, sequence_length, hidden_size = hidden_states.size() + if self.config.mask_time_prob > 0: + mask_time_indices = _compute_mask_indices( + (batch_size, sequence_length), + self.config.mask_time_prob, + 
self.config.mask_time_length, + attention_mask=attention_mask, + min_masks=2, + ) + hidden_states[torch.from_numpy(mask_time_indices)] = self.masked_spec_embed.to(hidden_states.dtype) + if self.config.mask_feature_prob > 0: + mask_feature_indices = _compute_mask_indices( + (batch_size, hidden_size), + self.config.mask_feature_prob, + self.config.mask_feature_length, + ) + mask_feature_indices = torch.from_numpy(mask_feature_indices).to(hidden_states.device) + hidden_states[mask_feature_indices[:, None].expand(-1, sequence_length, -1)] = 0 + encoder_outputs = self.encoder( + hidden_states[0], + attention_mask=attention_mask, + output_attentions=output_attentions, + output_hidden_states=output_hidden_states, + return_dict=return_dict, + ) + hidden_states = encoder_outputs[0] + if not return_dict: + return (hidden_states,) + encoder_outputs[1:] + + return BaseModelOutput( + last_hidden_state=hidden_states, + hidden_states=encoder_outputs.hidden_states, + attentions=encoder_outputs.attentions, + ) diff --git a/nets/utils.py b/nets/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..41b03374e8023016b3bec4f66ab16cb421222ef1 --- /dev/null +++ b/nets/utils.py @@ -0,0 +1,122 @@ +import json +import textgrid as tg +import numpy as np + +def get_parameter_size(model): + total_num = sum(p.numel() for p in model.parameters()) + trainable_num = sum(p.numel() for p in model.parameters() if p.requires_grad) + return total_num, trainable_num + +def denormalize(kps, data_mean, data_std): + ''' + kps: (B, T, C) + ''' + data_std = data_std.reshape(1, 1, -1) + data_mean = data_mean.reshape(1, 1, -1) + return (kps * data_std) + data_mean + +def normalize(kps, data_mean, data_std): + ''' + kps: (B, T, C) + ''' + data_std = data_std.squeeze().reshape(1, 1, -1) + data_mean = data_mean.squeeze().reshape(1, 1, -1) + + return (kps-data_mean) / data_std + +def parse_audio(textgrid_file): + '''a demo implementation''' + words=['but', 'as', 'to', 'that', 'with', 'of', 'the', 'and', 'or', 'not', 'which', 'what', 'this', 'for', 'because', 'if', 'so', 'just', 'about', 'like', 'by', 'how', 'from', 'whats', 'now', 'very', 'that', 'also', 'actually', 'who', 'then', 'well', 'where', 'even', 'today', 'between', 'than', 'when'] + txt=tg.TextGrid.fromFile(textgrid_file) + + total_time=int(np.ceil(txt.maxTime)) + code_seq=np.zeros(total_time) + + word_level=txt[0] + + for i in range(len(word_level)): + start_time=word_level[i].minTime + end_time=word_level[i].maxTime + mark=word_level[i].mark + + if mark in words: + start=int(np.round(start_time)) + end=int(np.round(end_time)) + + if start >= len(code_seq) or end >= len(code_seq): + code_seq[-1] = 1 + else: + code_seq[start]=1 + + return code_seq + + +def get_path(model_name, model_type): + if model_name == 's2g_body_pixel': + if model_type == 'mfcc': + return './experiments/2022-10-09-smplx_S2G-body-pixel-aud-3p/ckpt-99.pth' + elif model_type == 'wv2': + return './experiments/2022-10-28-smplx_S2G-body-pixel-wv2-sg2/ckpt-99.pth' + elif model_type == 'random': + return './experiments/2022-10-09-smplx_S2G-body-pixel-random-3p/ckpt-99.pth' + elif model_type == 'wbhmodel': + return './experiments/2022-11-02-smplx_S2G-body-pixel-w-bhmodel/ckpt-99.pth' + elif model_type == 'wobhmodel': + return './experiments/2022-11-02-smplx_S2G-body-pixel-wo-bhmodel/ckpt-99.pth' + elif model_name == 's2g_body': + if model_type == 'a+m-vae': + return './experiments/2022-10-19-smplx_S2G-body-audio-motion-vae/ckpt-99.pth' + elif model_type == 'a-vae': + return 
'./experiments/2022-10-18-smplx_S2G-body-audiovae/ckpt-99.pth' + elif model_type == 'a-ed': + return './experiments/2022-10-18-smplx_S2G-body-audioae/ckpt-99.pth' + elif model_name == 's2g_LS3DCG': + return './experiments/2022-10-19-smplx_S2G-LS3DCG/ckpt-99.pth' + elif model_name == 's2g_body_vq': + if model_type == 'n_com_1024': + return './experiments/2022-10-29-smplx_S2G-body-vq-cn1024/ckpt-99.pth' + elif model_type == 'n_com_2048': + return './experiments/2022-10-29-smplx_S2G-body-vq-cn2048/ckpt-99.pth' + elif model_type == 'n_com_4096': + return './experiments/2022-10-29-smplx_S2G-body-vq-cn4096/ckpt-99.pth' + elif model_type == 'n_com_8192': + return './experiments/2022-11-02-smplx_S2G-body-vq-cn8192/ckpt-99.pth' + elif model_type == 'n_com_16384': + return './experiments/2022-11-02-smplx_S2G-body-vq-cn16384/ckpt-99.pth' + elif model_type == 'n_com_170000': + return './experiments/2022-10-30-smplx_S2G-body-vq-cn170000/ckpt-99.pth' + elif model_type == 'com_1024': + return './experiments/2022-10-29-smplx_S2G-body-vq-composition/ckpt-99.pth' + elif model_type == 'com_2048': + return './experiments/2022-10-31-smplx_S2G-body-vq-composition2048/ckpt-99.pth' + elif model_type == 'com_4096': + return './experiments/2022-10-31-smplx_S2G-body-vq-composition4096/ckpt-99.pth' + elif model_type == 'com_8192': + return './experiments/2022-11-02-smplx_S2G-body-vq-composition8192/ckpt-99.pth' + elif model_type == 'com_16384': + return './experiments/2022-11-02-smplx_S2G-body-vq-composition16384/ckpt-99.pth' + + +def get_dpath(model_name, model_type): + if model_name == 's2g_body_pixel': + if model_type == 'audio': + return './experiments/2022-10-26-smplx_S2G-d-pixel-aud/ckpt-9.pth' + elif model_type == 'wv2': + return './experiments/2022-11-04-smplx_S2G-d-pixel-wv2/ckpt-9.pth' + elif model_type == 'random': + return './experiments/2022-10-26-smplx_S2G-d-pixel-random/ckpt-9.pth' + elif model_type == 'wbhmodel': + return './experiments/2022-11-10-smplx_S2G-hD-wbhmodel/ckpt-9.pth' + # return './experiments/2022-11-05-smplx_S2G-d-pixel-wbhmodel/ckpt-9.pth' + elif model_type == 'wobhmodel': + return './experiments/2022-11-10-smplx_S2G-hD-wobhmodel/ckpt-9.pth' + # return './experiments/2022-11-05-smplx_S2G-d-pixel-wobhmodel/ckpt-9.pth' + elif model_name == 's2g_body': + if model_type == 'a+m-vae': + return './experiments/2022-10-26-smplx_S2G-d-audio+motion-vae/ckpt-9.pth' + elif model_type == 'a-vae': + return './experiments/2022-10-26-smplx_S2G-d-audio-vae/ckpt-9.pth' + elif model_type == 'a-ed': + return './experiments/2022-10-26-smplx_S2G-d-audio-ae/ckpt-9.pth' + elif model_name == 's2g_LS3DCG': + return './experiments/2022-10-26-smplx_S2G-d-ls3dcg/ckpt-9.pth' \ No newline at end of file diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000000000000000000000000000000000000..548122300ce1074748d48a1ec0956188317494ab --- /dev/null +++ b/requirements.txt @@ -0,0 +1,21 @@ +numpy~=1.21.6 +transformers~=4.22.2 +torch~=1.10.1 +torchaudio~=0.10.1 +matplotlib~=3.2.2 +torchvision~=0.11.2 +textgrid~=1.5 +scikit-learn~=1.0.2 +smplx~=0.1.28 +opencv-python~=4.7.0.72 +pyrender~=0.1.45 +trimesh~=3.14.1 +gradio~=3.24.1 +tqdm~=4.64.1 +librosa~=0.9.2 +scipy~=1.7.3 +pandas~=1.3.5 +zmq~=0.0.0 +pyzmq~=25.0.2 +pillow~=9.2.0 +setuptools~=65.6.3 \ No newline at end of file diff --git a/scripts/.idea/__init__.py b/scripts/.idea/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/scripts/.idea/aws.xml 
b/scripts/.idea/aws.xml new file mode 100644 index 0000000000000000000000000000000000000000..b63b642cfb4254fc0f7058903abc5b481895c4ef --- /dev/null +++ b/scripts/.idea/aws.xml @@ -0,0 +1,11 @@ + + + + + + \ No newline at end of file diff --git a/scripts/.idea/deployment.xml b/scripts/.idea/deployment.xml new file mode 100644 index 0000000000000000000000000000000000000000..14f2c41a46d0210c6395ed0e7bfd3b630211f699 --- /dev/null +++ b/scripts/.idea/deployment.xml @@ -0,0 +1,70 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/scripts/.idea/get_prevar.py b/scripts/.idea/get_prevar.py new file mode 100644 index 0000000000000000000000000000000000000000..d4b2dfb1892e9ff79c8074f35e84d897c64ff673 --- /dev/null +++ b/scripts/.idea/get_prevar.py @@ -0,0 +1,132 @@ +import os +import sys +os.environ['CUDA_VISIBLE_DEVICES'] = '0' + +sys.path.append(os.getcwd()) +from glob import glob + +import numpy as np +import json +import smplx as smpl + +from nets import * +from repro_nets import * +from trainer.options import parse_args +from data_utils import torch_data +from trainer.config import load_JsonConfig + +import torch +import torch.nn as nn +import torch.nn.functional as F +from torch.utils import data + +def init_model(model_name, model_path, args, config): + if model_name == 'freeMo': + # generator = freeMo_Generator(args) + # generator = freeMo_Generator(args) + generator = freeMo_dev(args, config) + # generator.load_state_dict(torch.load(model_path)['generator']) + elif model_name == 'smplx_S2G': + generator = smplx_S2G(args, config) + elif model_name == 'StyleGestures': + generator = StyleGesture_Generator( + args, + config + ) + elif model_name == 'Audio2Gestures': + config.Train.using_mspec_stat = False + generator = Audio2Gesture_Generator( + args, + config, + torch.zeros([1, 1, 108]), + torch.ones([1, 1, 108]) + ) + elif model_name == 'S2G': + generator = S2G_Generator( + args, + config, + ) + elif model_name == 'Tmpt': + generator = S2G_Generator( + args, + config, + ) + else: + raise NotImplementedError + + model_ckpt = torch.load(model_path, map_location=torch.device('cpu')) + if model_name == 'smplx_S2G': + generator.generator.load_state_dict(model_ckpt['generator']['generator']) + elif 'generator' in list(model_ckpt.keys()): + generator.load_state_dict(model_ckpt['generator']) + else: + model_ckpt = {'generator': model_ckpt} + generator.load_state_dict(model_ckpt) + + return generator + + + +def prevar_loader(data_root, speakers, args, config, model_path, device, generator): + path = model_path.split('ckpt')[0] + file = os.path.join(os.path.dirname(path), "pre_variable.npy") + data_base = torch_data( + data_root=data_root, + speakers=speakers, + split='pre', + limbscaling=False, + normalization=config.Data.pose.normalization, + norm_method=config.Data.pose.norm_method, + split_trans_zero=False, + num_pre_frames=config.Data.pose.pre_pose_length, + num_generate_length=config.Data.pose.generate_length, + num_frames=15, + aud_feat_win_size=config.Data.aud.aud_feat_win_size, + aud_feat_dim=config.Data.aud.aud_feat_dim, + feat_method=config.Data.aud.feat_method, + smplx=True, + audio_sr=22000, + convert_to_6d=config.Data.pose.convert_to_6d, + expression=config.Data.pose.expression + ) + + data_base.get_dataset() + pre_set = data_base.all_dataset + pre_loader = data.DataLoader(pre_set, batch_size=config.DataLoader.batch_size, shuffle=False, drop_last=True) + + 
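+ # Encode fixed-length (15-frame) pose+expression clips with the pre-pose
+ # encoder, collect the resulting latent codes, and store their per-dimension
+ # mean/std in pre_variable.npy as the pre-pose statistics.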
total_pose = [] + + with torch.no_grad(): + for bat in pre_loader: + pose = bat['poses'].to(device).to(torch.float32) + expression = bat['expression'].to(device).to(torch.float32) + pose = pose.permute(0, 2, 1) + pose = torch.cat([pose[:, :15], pose[:, 15:30], pose[:, 30:45], pose[:, 45:60], pose[:, 60:]], dim=0) + expression = expression.permute(0, 2, 1) + expression = torch.cat([expression[:, :15], expression[:, 15:30], expression[:, 30:45], expression[:, 45:60], expression[:, 60:]], dim=0) + pose = torch.cat([pose, expression], dim=-1) + pose = pose.reshape(pose.shape[0], -1, 1) + pose_code = generator.generator.pre_pose_encoder(pose).squeeze().detach().cpu() + total_pose.append(np.asarray(pose_code)) + total_pose = np.concatenate(total_pose, axis=0) + mean = np.mean(total_pose, axis=0) + std = np.std(total_pose, axis=0) + prevar = (mean, std) + np.save(file, prevar, allow_pickle=True) + + return mean, std + +def main(): + parser = parse_args() + args = parser.parse_args() + device = torch.device(args.gpu) + torch.cuda.set_device(device) + + config = load_JsonConfig(args.config_file) + + print('init model...') + generator = init_model(config.Model.model_name, args.model_path, args, config) + print('init pre-pose vectors...') + mean, std = prevar_loader(config.Data.data_root, args.speakers, args, config, args.model_path, device, generator) + +main() \ No newline at end of file diff --git a/scripts/.idea/inspectionProfiles/Project_Default.xml b/scripts/.idea/inspectionProfiles/Project_Default.xml new file mode 100644 index 0000000000000000000000000000000000000000..3dce9c67a3cba33789d113124d53150ccca2370b --- /dev/null +++ b/scripts/.idea/inspectionProfiles/Project_Default.xml @@ -0,0 +1,12 @@ + + + + \ No newline at end of file diff --git a/scripts/.idea/inspectionProfiles/profiles_settings.xml b/scripts/.idea/inspectionProfiles/profiles_settings.xml new file mode 100644 index 0000000000000000000000000000000000000000..105ce2da2d6447d11dfe32bfb846c3d5b199fc99 --- /dev/null +++ b/scripts/.idea/inspectionProfiles/profiles_settings.xml @@ -0,0 +1,6 @@ + + + + \ No newline at end of file diff --git a/scripts/.idea/lower body b/scripts/.idea/lower body new file mode 100644 index 0000000000000000000000000000000000000000..1efda13cfb1455b382ced16ed1ddb16d1716ae7f --- /dev/null +++ b/scripts/.idea/lower body @@ -0,0 +1 @@ +0, 1, 3, 4, 6, 7, 9, 10, \ No newline at end of file diff --git a/scripts/.idea/modules.xml b/scripts/.idea/modules.xml new file mode 100644 index 0000000000000000000000000000000000000000..bb83e262159915cb1ea30b748c3123878bf4c341 --- /dev/null +++ b/scripts/.idea/modules.xml @@ -0,0 +1,8 @@ + + + + + + + + \ No newline at end of file diff --git a/scripts/.idea/scripts.iml b/scripts/.idea/scripts.iml new file mode 100644 index 0000000000000000000000000000000000000000..d0876a78d06ac03b5d78c8dcdb95570281c6f1d6 --- /dev/null +++ b/scripts/.idea/scripts.iml @@ -0,0 +1,8 @@ + + + + + + + + \ No newline at end of file diff --git a/scripts/.idea/test.png b/scripts/.idea/test.png new file mode 100644 index 0000000000000000000000000000000000000000..b028ad2b8c4fb0b6a89571ae62d2654908c93d9c Binary files /dev/null and b/scripts/.idea/test.png differ diff --git a/scripts/.idea/testtext.py b/scripts/.idea/testtext.py new file mode 100644 index 0000000000000000000000000000000000000000..af0185442ae8b1e84c9ea64c671dde6da394046c --- /dev/null +++ b/scripts/.idea/testtext.py @@ -0,0 +1,24 @@ +import cv2 + +# path being defined from where the system will read the image +path = r'test.png' +# command 
used to read an image from disk via cv2.imread +image1 = cv2.imread(path) +# Window name being specified where the image will be displayed +window_name1 = 'image' +# font for the text being specified +font1 = cv2.FONT_HERSHEY_SIMPLEX +# org for the text being specified +org1 = (50, 50) +# font scale for the text being specified +fontScale1 = 1 +# White color for the text, specified in BGR +color1 = (255, 255, 255) +# Line thickness for the text being specified at 2 px +thickness1 = 2 +# Using the cv2.putText() method for inserting text in the image of the specified path +image_1 = cv2.putText(image1, 'CAT IN BOX', org1, font1, fontScale1, color1, thickness1, cv2.LINE_AA) +# Displaying the output image +cv2.imshow(window_name1, image_1) +cv2.waitKey(0) +cv2.destroyAllWindows() diff --git a/scripts/.idea/vcs.xml b/scripts/.idea/vcs.xml new file mode 100644 index 0000000000000000000000000000000000000000..6c0b8635858dc7ad44b93df54b762707ce49eefc --- /dev/null +++ b/scripts/.idea/vcs.xml @@ -0,0 +1,6 @@ + + + + + + \ No newline at end of file diff --git a/scripts/.idea/workspace.xml b/scripts/.idea/workspace.xml new file mode 100644 index 0000000000000000000000000000000000000000..e45519a6841bc50a93d2d3bdb05aaa935ff861a0 --- /dev/null +++ b/scripts/.idea/workspace.xml @@ -0,0 +1,75 @@ + + + + + + + + + + + + + + + + + + +