-
Notifications
You must be signed in to change notification settings - Fork 0
/
hand_crop2.py
60 lines (45 loc) · 1.82 KB
/
hand_crop2.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
"""
Process CMU Hand dataset to get cropped hand datasets.
"""
import os
import numpy as np
import json
from PIL import Image
dirs = ['train', 'test']
for data in dirs:
    # Crop a fixed-size square around each labeled hand, rescale it to
    # 256x256, and save both the cropped image and the keypoints
    # transformed into the crop's pixel coordinates.
    save_dir = 'hand_labels/' + data + '/crop/'             # cropped images
    new_label_dir = 'hand_labels/' + data + '/crop_label/'  # keypoints in crop coords
    # The output directories must exist before saving; otherwise Image.save
    # and open(..., 'w') raise FileNotFoundError on a fresh checkout.
    os.makedirs(save_dir, exist_ok=True)
    os.makedirs(new_label_dir, exist_ok=True)
    imgs = os.listdir('hand_labels/' + data + '/data/')
    for img in imgs:
        if img == '.DS_Store':  # skip macOS Finder metadata
            continue
        data_dir = 'hand_labels/' + data + '/data/' + img
        label_dir = 'hand_labels/' + data + '/label/' + img[:-4] + '.json'
        # Context manager closes the label file promptly — the previous
        # json.load(open(...)) leaked one file descriptor per image.
        with open(label_dir) as f:
            dat = json.load(f)
        pts = np.array(dat['hand_pts'])
        xmin = pts[:, 0].min()
        xmax = pts[:, 0].max()
        ymin = pts[:, 1].min()
        ymax = pts[:, 1].max()
        # B is the maximum dimension of the tightest bounding box
        B = max(xmax - xmin, ymax - ymin)
        width = 2.2 * B  # crop-box side length; factor is based on the paper
        # Center the square crop box on the annotated hand-box center.
        center = dat["hand_box_center"]
        hand_box = np.array([[center[0] - width / 2., center[1] - width / 2.],
                             [center[0] + width / 2., center[1] + width / 2.]])
        im = Image.open(data_dir)
        im = im.crop((hand_box[0, 0], hand_box[0, 1], hand_box[1, 0], hand_box[1, 1]))
        im = im.resize((256, 256))
        im.save(save_dir + img)  # save cropped image
        # Shift keypoints to crop-local coordinates, then scale to the
        # resized 256x256 output.
        lbl = (pts[:, :2] - hand_box[0, :]) * 256 / width
        with open(new_label_dir + img[:-4] + '.json', 'w') as f:
            json.dump(lbl.tolist(), f)