From 6d108309bf6892240e5c15b527bc4f5178afed9a Mon Sep 17 00:00:00 2001
From: Peizhuo Li
Date: Sun, 22 Aug 2021 22:17:58 +0800
Subject: [PATCH] Remove deprecated code

---
 dataset/smpl_layer/README.md          |   2 -
 dataset/smpl_layer/posemapper.py      |  31 ----
 dataset/smpl_layer/rodrigues_layer.py |  55 -------
 dataset/smpl_layer/serialization.py   |  60 --------
 dataset/smpl_layer/smpl_layer.py      | 205 --------------------------
 dataset/smpl_layer/tensutils.py       |  53 -------
 6 files changed, 406 deletions(-)
 delete mode 100644 dataset/smpl_layer/README.md
 delete mode 100644 dataset/smpl_layer/posemapper.py
 delete mode 100644 dataset/smpl_layer/rodrigues_layer.py
 delete mode 100644 dataset/smpl_layer/serialization.py
 delete mode 100644 dataset/smpl_layer/smpl_layer.py
 delete mode 100644 dataset/smpl_layer/tensutils.py

diff --git a/dataset/smpl_layer/README.md b/dataset/smpl_layer/README.md
deleted file mode 100644
index c9b77bf..0000000
--- a/dataset/smpl_layer/README.md
+++ /dev/null
@@ -1,2 +0,0 @@
-This code is adapted from
-https://github.com/gulvarol/smplpytorch
diff --git a/dataset/smpl_layer/posemapper.py b/dataset/smpl_layer/posemapper.py
deleted file mode 100644
index 88a2ed7..0000000
--- a/dataset/smpl_layer/posemapper.py
+++ /dev/null
@@ -1,31 +0,0 @@
-import chumpy as ch
-import numpy as np
-import cv2
-
-
-class Rodrigues(ch.Ch):
-    dterms = 'rt'
-
-    def compute_r(self):
-        return cv2.Rodrigues(self.rt.r)[0]
-
-    def compute_dr_wrt(self, wrt):
-        if wrt is self.rt:
-            return cv2.Rodrigues(self.rt.r)[1].T
-
-
-def lrotmin(p):
-    if isinstance(p, np.ndarray):
-        p = p.ravel()[3:]
-        return np.concatenate([(cv2.Rodrigues(np.array(pp))[0] - np.eye(3)).ravel() for pp in p.reshape((-1, 3))]).ravel()
-    if p.ndim != 2 or p.shape[1] != 3:
-        p = p.reshape((-1, 3))
-    p = p[1:]
-    return ch.concatenate([(Rodrigues(pp) - ch.eye(3)).ravel() for pp in p]).ravel()
-
-
-def posemap(s):
-    if s == 'lrotmin':
-        return lrotmin
-    else:
-        raise Exception('Unknown posemapping: %s' % (str(s),))
diff --git a/dataset/smpl_layer/rodrigues_layer.py b/dataset/smpl_layer/rodrigues_layer.py
deleted file mode 100644
index ac27adc..0000000
--- a/dataset/smpl_layer/rodrigues_layer.py
+++ /dev/null
@@ -1,55 +0,0 @@
-"""
-This part reuses code from https://github.com/MandyMo/pytorch_HMR/blob/master/src/util.py
-which is part of a PyTorch port of SMPL.
-Thanks to Zhang Xiong (MandyMo) for making this great code available on github !
-"""
-
-import torch
-
-
-def quat2mat(quat):
-    """Convert quaternion coefficients to rotation matrix.
-    Args:
-        quat: size = [batch_size, 4] 4 <===>(w, x, y, z)
-    Returns:
-        Rotation matrix corresponding to the quaternion -- size = [batch_size, 3, 3]
-    """
-    norm_quat = quat
-    norm_quat = norm_quat / norm_quat.norm(p=2, dim=1, keepdim=True)
-    w, x, y, z = norm_quat[:, 0], norm_quat[:, 1], norm_quat[:,
-                                                             2], norm_quat[:,
-                                                                           3]
-
-    batch_size = quat.size(0)
-
-    w2, x2, y2, z2 = w.pow(2), x.pow(2), y.pow(2), z.pow(2)
-    wx, wy, wz = w * x, w * y, w * z
-    xy, xz, yz = x * y, x * z, y * z
-
-    rotMat = torch.stack([
-        w2 + x2 - y2 - z2, 2 * xy - 2 * wz, 2 * wy + 2 * xz, 2 * wz + 2 * xy,
-        w2 - x2 + y2 - z2, 2 * yz - 2 * wx, 2 * xz - 2 * wy, 2 * wx + 2 * yz,
-        w2 - x2 - y2 + z2
-    ],
-                         dim=1).view(batch_size, 3, 3)
-    return rotMat
-
-
-def batch_rodrigues(axisang):
-    #axisang N x 3
-    axisang_norm = torch.norm(axisang + 1e-8, p=2, dim=1)
-    angle = torch.unsqueeze(axisang_norm, -1)
-    axisang_normalized = torch.div(axisang, angle)
-    angle = angle * 0.5
-    v_cos = torch.cos(angle)
-    v_sin = torch.sin(angle)
-    quat = torch.cat([v_cos, v_sin * axisang_normalized], dim=1)
-    rot_mat = quat2mat(quat)
-    rot_mat = rot_mat.view(rot_mat.shape[0], 9)
-    return rot_mat
-
-
-def th_get_axis_angle(vector):
-    angle = torch.norm(vector, 2, 1)
-    axes = vector / angle.unsqueeze(1)
-    return axes, angle
diff --git a/dataset/smpl_layer/serialization.py b/dataset/smpl_layer/serialization.py
deleted file mode 100644
index 36bf51b..0000000
--- a/dataset/smpl_layer/serialization.py
+++ /dev/null
@@ -1,60 +0,0 @@
-def ready_arguments(fname_or_dict, highRes):
-    import numpy as np
-    import pickle
-    import chumpy as ch
-    from chumpy.ch import MatVecMult
-    from dataset.smpl_layer.posemapper import posemap
-    import scipy.sparse as sp
-
-    if not isinstance(fname_or_dict, dict):
-        dd = pickle.load(open(fname_or_dict, 'rb'), encoding='latin1')
-        # dd = pickle.load(open(fname_or_dict, 'rb'))
-    else:
-        dd = fname_or_dict
-
-    want_shapemodel = 'shapedirs' in dd
-    nposeparms = dd['kintree_table'].shape[1] * 3
-
-    if 'trans' not in dd:
-        dd['trans'] = np.zeros(3)
-    if 'pose' not in dd:
-        dd['pose'] = np.zeros(nposeparms)
-    if 'shapedirs' in dd and 'betas' not in dd:
-        dd['betas'] = np.zeros(dd['shapedirs'].shape[-1])
-
-    for s in ['v_template', 'weights', 'posedirs', 'pose', 'trans', 'shapedirs', 'betas', 'J']:
-        if (s in dd) and isinstance(dd[s], ch.ch.Ch):
-            dd[s] = dd[s].r
-
-    if want_shapemodel:
-        dd['v_shaped'] = dd['shapedirs'].dot(dd['betas']) + dd['v_template']
-        v_shaped = dd['v_shaped']
-        J_tmpx = MatVecMult(dd['J_regressor'], v_shaped[:, 0])
-        J_tmpy = MatVecMult(dd['J_regressor'], v_shaped[:, 1])
-        J_tmpz = MatVecMult(dd['J_regressor'], v_shaped[:, 2])
-        dd['J'] = ch.vstack((J_tmpx, J_tmpy, J_tmpz)).T
-        dd['v_posed'] = v_shaped + dd['posedirs'].dot(posemap(dd['bs_type'])(dd['pose']))
-    else:
-        dd['v_posed'] = dd['v_template'] + dd['posedirs'].dot(posemap(dd['bs_type'])(dd['pose']))
-
-    if highRes is not None:
-        with open(highRes, 'rb') as f:
-            mapping, hf = pickle.load(f, encoding='latin1')
-        num_betas = dd['shapedirs'].shape[-1]
-        hv = mapping.dot(dd['v_template'].ravel()).reshape(-1, 3)
-        J_reg = dd['J_regressor'].asformat('csr')
-        dd['f'] = hf
-        dd['v_template'] = hv
-        dd['weights'] = np.hstack([
-            np.expand_dims(
-                np.mean(
-                    mapping.dot(np.repeat(np.expand_dims(dd['weights'][:, i], -1), 3)).reshape(-1, 3)
-                    , axis=1),
-                axis=-1)
-            for i in range(24)
-        ])
-        dd['posedirs'] = mapping.dot(dd['posedirs'].reshape((-1, 207))).reshape(-1, 3, 207)
-        dd['shapedirs'] = mapping.dot(dd['shapedirs'].reshape((-1, num_betas))).reshape(-1, 3, num_betas)
-        dd['J_regressor'] = sp.csr_matrix((J_reg.data, J_reg.indices, J_reg.indptr), shape=(24, hv.shape[0]))

-    return dd
diff --git a/dataset/smpl_layer/smpl_layer.py b/dataset/smpl_layer/smpl_layer.py
deleted file mode 100644
index 9864646..0000000
--- a/dataset/smpl_layer/smpl_layer.py
+++ /dev/null
@@ -1,205 +0,0 @@
-import os
-
-import numpy as np
-import torch
-from torch.nn import Module
-
-from dataset.smpl_layer.serialization import ready_arguments
-from dataset.smpl_layer.tensutils import (th_posemap_axisang, th_with_zeros, th_pack, make_list, subtract_flat_id)
-from dataset.obj_io import write_obj
-
-from models.kinematics import ForwardKinematics
-from models.transforms import aa2mat
-from models.deformation import deform_with_offset
-
-
-class SMPL_Layer(Module):
-    __constants__ = ['kintree_parents', 'gender', 'center_idx', 'num_joints']
-
-    def __init__(self,
-                 center_idx=None,
-                 gender='neutral',
-                 model_root='./dataset/smpl_model',
-                 highRes=False):
-        """
-        Args:
-            center_idx: index of center joint in our computations,
-            model_root: path to pkl files for the model
-            gender: 'neutral' (default) or 'female' or 'male'
-        """
-        super().__init__()
-
-        self.center_idx = center_idx
-        self.gender = gender
-
-        if gender == 'neutral':
-            self.model_path = os.path.join(model_root, 'basicModel_neutral_lbs_10_207_0_v1.0.0.pkl')
-        elif gender == 'female':
-            self.model_path = os.path.join(model_root, 'basicModel_f_lbs_10_207_0_v1.0.0.pkl')
-        elif gender == 'male':
-            self.model_path = os.path.join(model_root, 'basicModel_m_lbs_10_207_0_v1.0.0.pkl')
-        highRes_path = os.path.join(model_root, 'hresMapping.pkl') if highRes else None
-
-        smpl_data = ready_arguments(self.model_path, highRes_path)
-        self.smpl_data = smpl_data
-
-        self.register_buffer('th_betas',
-                             torch.Tensor(smpl_data['betas']).unsqueeze(0))
-        self.register_buffer('th_shapedirs',
-                             torch.Tensor(smpl_data['shapedirs'].copy()))
-        self.register_buffer('th_posedirs',
-                             torch.Tensor(smpl_data['posedirs']))
-        self.register_buffer(
-            'th_v_template',
-            torch.Tensor(smpl_data['v_template']).unsqueeze(0))
-        self.register_buffer(
-            'th_J_regressor',
-            torch.Tensor(np.array(smpl_data['J_regressor'].toarray())))
-        self.register_buffer('th_weights',
-                             torch.Tensor(smpl_data['weights']))
-        self.register_buffer('th_faces',
-                             torch.Tensor(smpl_data['f'].astype(np.int32)).long())
-
-        # Kinematic chain params
-        self.kintree_table = smpl_data['kintree_table']
-        parents = list(self.kintree_table[0].tolist())
-        self.kintree_parents = parents
-        self.num_joints = len(parents)  # 24
-        self.num_verts = self.th_v_template.shape[1]
-
-        self.fk = ForwardKinematics(self.kintree_parents)
-
-    def get_offset(self, shapes=torch.zeros((1, 10))):
-        batch_size = shapes.shape[0]
-        parent_smpl = self.kintree_parents
-        t_pose, j_loc = self.forward(torch.zeros((batch_size, 24 * 3), device=shapes.device), shapes)
-        for i in list(range(len(parent_smpl)))[::-1]:
-            if i == 0:
-                break
-            p = parent_smpl[i]
-            j_loc[:, i] -= j_loc[:, p]
-        offset = j_loc
-        return offset
-
-    def forward(self,
-                th_pose_axisang,
-                th_betas=None,
-                th_trans=torch.zeros(1),
-                requires_transformation=False):
-        """
-        Args:
-            th_pose_axisang (Tensor (batch_size x 72)): pose parameters in axis-angle representation
-            th_betas (Tensor (batch_size x 10)): if provided, uses given shape parameters
-            th_trans (Tensor (batch_size x 3)): if provided, applies trans to joints and vertices
-        """
-
-        batch_size = th_pose_axisang.shape[0]
-        # Convert axis-angle representation to rotation matrix rep.
-        th_pose_rotmat = th_posemap_axisang(th_pose_axisang)
-        # Take out the first rotmat (global rotation)
-        root_rot = th_pose_rotmat[:, :9].view(batch_size, 3, 3)
-        # Take out the remaining rotmats (23 joints)
-        th_pose_rotmat = th_pose_rotmat[:, 9:]
-        th_pose_map = subtract_flat_id(th_pose_rotmat)
-
-        # Below does: v_shaped = v_template + shapedirs * betas
-        # If shape parameters are not provided
-        if th_betas is None:
-            th_v_shaped = self.th_v_template + torch.matmul(
-                self.th_shapedirs, self.th_betas.transpose(1, 0)).permute(2, 0, 1)
-            th_j = torch.matmul(self.th_J_regressor, th_v_shaped).repeat(
-                batch_size, 1, 1)
-        else:
-            th_v_shaped = self.th_v_template + torch.matmul(
-                self.th_shapedirs, th_betas.transpose(1, 0)).permute(2, 0, 1)
-            th_j = torch.matmul(self.th_J_regressor, th_v_shaped)
-
-        # Below does: v_posed = v_shaped + posedirs * pose_map
-        th_v_posed = th_v_shaped + torch.matmul(
-            self.th_posedirs, th_pose_map.transpose(0, 1)).permute(2, 0, 1)
-        # Final T pose with transformation done!
-
-        # Global rigid transformation
-        th_results = []
-
-        root_j = th_j[:, 0, :].contiguous().view(batch_size, 3, 1)
-        th_results.append(th_with_zeros(torch.cat([root_rot, root_j], 2)))
-
-        # Rotate each part
-        for i in range(self.num_joints - 1):
-            i_val = int(i + 1)
-            joint_rot = th_pose_rotmat[:, (i_val - 1) * 9:i_val *
-                                       9].contiguous().view(batch_size, 3, 3)
-            joint_j = th_j[:, i_val, :].contiguous().view(batch_size, 3, 1)
-            parent = make_list(self.kintree_parents)[i_val]
-            parent_j = th_j[:, parent, :].contiguous().view(batch_size, 3, 1)
-            joint_rel_transform = th_with_zeros(
-                torch.cat([joint_rot, joint_j - parent_j], 2))
-            th_results.append(
-                torch.matmul(th_results[parent], joint_rel_transform))
-        th_results_global = th_results
-
-        th_results2 = torch.zeros((batch_size, 4, 4, self.num_joints),
-                                  dtype=root_j.dtype,
-                                  device=root_j.device)
-
-        for i in range(self.num_joints):
-            padd_zero = torch.zeros(1, dtype=th_j.dtype, device=th_j.device)
-            joint_j = torch.cat(
-                [th_j[:, i],
-                 padd_zero.view(1, 1).repeat(batch_size, 1)], 1)
-            tmp = torch.bmm(th_results[i], joint_j.unsqueeze(2))
-            th_results2[:, :, :, i] = th_results[i] - th_pack(tmp)
-
-        th_T = torch.matmul(th_results2, self.th_weights.transpose(0, 1))
-
-        th_rest_shape_h = torch.cat([
-            th_v_posed.transpose(2, 1),
-            torch.ones((batch_size, 1, th_v_posed.shape[1]),
-                       dtype=th_T.dtype,
-                       device=th_T.device),
-        ], 1)
-
-        th_verts = (th_T * th_rest_shape_h.unsqueeze(1)).sum(2).transpose(2, 1)
-        th_verts = th_verts[:, :, :3]
-        th_jtr = torch.stack(th_results_global, dim=1)[:, :, :3, 3]
-
-        # If translation is not provided
-        if th_trans is None or bool(torch.norm(th_trans) == 0):
-            if self.center_idx is not None:
-                center_joint = th_jtr[:, self.center_idx].unsqueeze(1)
-                th_jtr = th_jtr - center_joint
-                th_verts = th_verts - center_joint
-        else:
-            th_jtr = th_jtr + th_trans.unsqueeze(1)
-            th_verts = th_verts + th_trans.unsqueeze(1)
-
-        # Vertices and joints in meters
-        if requires_transformation:
-            return th_verts, th_jtr, th_results2
-        else:
-            return th_verts, th_jtr
-
-    def forward_lbs(self, poses, shapes=None, v_offsets=0):
-        if shapes is None:
-            shapes = torch.zeros((poses.shape[0], 10), device=poses.device)
-        t_pose = self.forward(torch.zeros_like(poses), shapes)[0]
-        offsets = self.get_offset(shapes)
-        local_mat = aa2mat(poses.reshape(poses.shape[0], -1, 3))
-        global_mat = self.fk.forward(local_mat, offsets)
-        return deform_with_offset(t_pose, self.th_weights, global_mat, offset=v_offsets)
-
-    def save_obj(self, filename, verts):
-        write_obj(filename, verts, self.th_faces)
-
-    def pose_blendshapes(self, th_pose_axisang):
-        batch_size = th_pose_axisang.shape[0]
-        # Convert axis-angle representation to rotation matrix rep.
-        th_pose_rotmat = th_posemap_axisang(th_pose_axisang)
-        # Take out the first rotmat (global rotation)
-        root_rot = th_pose_rotmat[:, :9].view(batch_size, 3, 3)
-        # Take out the remaining rotmats (23 joints)
-        th_pose_rotmat = th_pose_rotmat[:, 9:]
-        th_pose_map = subtract_flat_id(th_pose_rotmat)
-
-        return torch.matmul(self.th_posedirs, th_pose_map.transpose(0, 1)).permute(2, 0, 1)
diff --git a/dataset/smpl_layer/tensutils.py b/dataset/smpl_layer/tensutils.py
deleted file mode 100644
index 80ff936..0000000
--- a/dataset/smpl_layer/tensutils.py
+++ /dev/null
@@ -1,53 +0,0 @@
-import torch
-
-from dataset.smpl_layer import rodrigues_layer
-
-
-def th_posemap_axisang(pose_vectors):
-    '''
-    Converts axis-angle to rotmat
-    pose_vectors (Tensor (batch_size x 72)): pose parameters in axis-angle representation
-    '''
-    rot_nb = int(pose_vectors.shape[1] / 3)
-    rot_mats = []
-    for joint_idx in range(rot_nb):
-        axis_ang = pose_vectors[:, joint_idx * 3:(joint_idx + 1) * 3]
-        rot_mat = rodrigues_layer.batch_rodrigues(axis_ang)
-        rot_mats.append(rot_mat)
-
-    rot_mats = torch.cat(rot_mats, 1)
-    return rot_mats
-
-
-def th_with_zeros(tensor):
-    batch_size = tensor.shape[0]
-    padding = tensor.new([0.0, 0.0, 0.0, 1.0])
-    padding.requires_grad = False
-
-    concat_list = [tensor, padding.view(1, 1, 4).repeat(batch_size, 1, 1)]
-    cat_res = torch.cat(concat_list, 1)
-    return cat_res
-
-
-def th_pack(tensor):
-    batch_size = tensor.shape[0]
-    padding = tensor.new_zeros((batch_size, 4, 3))
-    padding.requires_grad = False
-    pack_list = [padding, tensor]
-    pack_res = torch.cat(pack_list, 2)
-    return pack_res
-
-
-def subtract_flat_id(rot_mats):
-    # Subtracts identity as a flattened tensor
-    id_flat = torch.eye(
-        3, dtype=rot_mats.dtype, device=rot_mats.device).view(1, 9).repeat(
-            rot_mats.shape[0], 23)
-    # id_flat.requires_grad = False
-    results = rot_mats - id_flat
-    return results
-
-
-def make_list(tensor):
-    # type: (List[int]) -> List[int]
-    return tensor