import matplotlib.pyplot as plt
import cv2

+ from src.hand_model_outputsize import model
+
def padRightDownCorner(img, stride, padValue):
    h = img.shape[0]
    w = img.shape[1]

    pad = 4 * [None]
-     pad[0] = 0 # up
-     pad[1] = 0 # left
-     pad[2] = 0 if (h % stride == 0) else stride - (h % stride) # down
-     pad[3] = 0 if (w % stride == 0) else stride - (w % stride) # right
+     pad[0] = 0  # up
+     pad[1] = 0  # left
+     pad[2] = 0 if (h % stride == 0) else stride - (h % stride)  # down
+     pad[3] = 0 if (w % stride == 0) else stride - (w % stride)  # right
    img_padded = img
-     pad_up = np.tile(img_padded[0:1, :, :]*0 + padValue, (pad[0], 1, 1))
+     pad_up = np.tile(img_padded[0:1, :, :] * 0 + padValue, (pad[0], 1, 1))
    img_padded = np.concatenate((pad_up, img_padded), axis=0)
-     pad_left = np.tile(img_padded[:, 0:1, :]*0 + padValue, (1, pad[1], 1))
+     pad_left = np.tile(img_padded[:, 0:1, :] * 0 + padValue, (1, pad[1], 1))
    img_padded = np.concatenate((pad_left, img_padded), axis=1)
-     pad_down = np.tile(img_padded[-2:-1, :, :]*0 + padValue, (pad[2], 1, 1))
+     pad_down = np.tile(img_padded[-2:-1, :, :] * 0 + padValue, (pad[2], 1, 1))
    img_padded = np.concatenate((img_padded, pad_down), axis=0)
-     pad_right = np.tile(img_padded[:, -2:-1, :]*0 + padValue, (1, pad[3], 1))
+     pad_right = np.tile(img_padded[:, -2:-1, :] * 0 + padValue, (1, pad[3], 1))
    img_padded = np.concatenate((img_padded, pad_right), axis=1)

    return img_padded, pad
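# Editor's note: a minimal, hedged usage sketch for padRightDownCorner, not
# part of this commit. stride=8 and padValue=128 mirror values commonly used
# with this helper but are assumptions here; `np` is the module's numpy import.
def _pad_example():
    img = np.zeros((37, 53, 3), dtype=np.uint8)      # arbitrary test image
    padded, pad = padRightDownCorner(img, 8, 128)    # pad down/right to a multiple of 8
    assert padded.shape[0] % 8 == 0 and padded.shape[1] % 8 == 0
    # undo the padding after processing by cropping off what was added
    restored = padded[:padded.shape[0] - pad[2], :padded.shape[1] - pad[3], :]
    assert restored.shape == img.shape
    return restored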
+
# transfer the caffe model weights to pytorch, matching the layer names
def transfer(model, model_weights):
    transfered_model_weights = {}
    for weights_name in model.state_dict().keys():
        transfered_model_weights[weights_name] = model_weights['.'.join(weights_name.split('.')[1:])]
    return transfered_model_weights
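# Editor's note: a hedged sketch of how transfer() is typically used, not part
# of this commit. The torch import and the checkpoint filename are assumptions;
# the helper just re-keys a converted caffe state dict so that a pytorch key
# such as 'model1_0.conv1.weight' is looked up as 'conv1.weight' on disk.
def _load_converted_weights_example(pose_model, checkpoint_path='body_pose_model.pth'):
    import torch
    caffe_style_weights = torch.load(checkpoint_path)    # dict keyed without the submodule prefix
    pose_model.load_state_dict(transfer(pose_model, caffe_style_weights))
    return pose_model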
+
+ def transfer2coordinate(candidate, subset):
+     coordinates = []
+     keyMap = ['eyebrow_center', 'neck',
+               'left_shoulder', 'left_elbow', 'left_wrist',
+               'right_shoulder', 'right_elbow', 'right_wrist',
+               'left_hip', 'left_knee', 'left_ankle',
+               'right_hip', 'right_knee', 'right_ankle'
+               ]
+     for n in range(len(subset)):
+         keypoint = {}
+         # only the first len(keyMap) keypoints are named here; iterating over
+         # all 18 would index past the end of keyMap
+         for i in range(len(keyMap)):
+             index = int(subset[n][i])
+             if index == -1:
+                 continue
+             x, y = candidate[index][0:2]
+             key = keyMap[i]
+             keypoint[key] = (x, y)
+         coordinates.append(keypoint)
+     return coordinates
+
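# Editor's note: a small self-contained sketch of transfer2coordinate above,
# not part of this commit. It assumes the usual openpose-style layout where
# candidate rows are [x, y, score, id] and each subset row stores candidate
# indices, with -1 marking a missing keypoint.
def _transfer2coordinate_example():
    candidate = np.array([[120.0, 80.0, 0.9, 0],
                          [150.0, 140.0, 0.8, 1]])
    subset = -1 * np.ones((1, 20))        # one person, no keypoints assigned yet
    subset[0, 0] = 0                      # keyMap[0] -> 'eyebrow_center'
    subset[0, 1] = 1                      # keyMap[1] -> 'neck'
    # returns roughly [{'eyebrow_center': (120.0, 80.0), 'neck': (150.0, 140.0)}]
    return transfer2coordinate(candidate, subset)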
# draw the body keypoints and limbs
def draw_bodypose(canvas, candidate, subset):
    stickwidth = 4
@@ -74,6 +98,7 @@ def draw_bodypose(canvas, candidate, subset):
    # plt.imshow(canvas[:, :, [2, 1, 0]])
    return canvas
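# Editor's note: hedged usage sketch, not part of this commit. draw_bodypose
# consumes the same (candidate, subset) arrays as transfer2coordinate and
# draws limbs and keypoints onto a BGR image; the output filename is assumed.
def _draw_bodypose_example(oriImg, candidate, subset):
    canvas = oriImg.copy()                       # keep the original frame untouched
    canvas = draw_bodypose(canvas, candidate, subset)
    cv2.imwrite('pose_preview.png', canvas)
    return canvas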
+
def draw_handpose(canvas, all_hand_peaks, show_number=False):
    edges = [[0, 1], [1, 2], [2, 3], [3, 4], [0, 5], [5, 6], [6, 7], [7, 8], [0, 9], [9, 10], \
             [10, 11], [11, 12], [0, 13], [13, 14], [14, 15], [15, 16], [0, 17], [17, 18], [18, 19], [19, 20]]
@@ -90,10 +115,10 @@ def draw_handpose(canvas, all_hand_peaks, show_number=False):
    for peaks in all_hand_peaks:
        for ie, e in enumerate(edges):
-             if np.sum(np.all(peaks[e], axis=1)==0) == 0:
+             if np.sum(np.all(peaks[e], axis=1) == 0) == 0:
                x1, y1 = peaks[e[0]]
                x2, y2 = peaks[e[1]]
-                 ax.plot([x1, x2], [y1, y2], color=matplotlib.colors.hsv_to_rgb([ie/float(len(edges)), 1.0, 1.0]))
+                 ax.plot([x1, x2], [y1, y2], color=matplotlib.colors.hsv_to_rgb([ie / float(len(edges)), 1.0, 1.0]))

        for i, keyponit in enumerate(peaks):
            x, y = keyponit
@@ -104,17 +129,19 @@ def draw_handpose(canvas, all_hand_peaks, show_number=False):
    canvas = np.fromstring(bg.tostring_rgb(), dtype='uint8').reshape(int(height), int(width), 3)
    return canvas
+
# image drawn by opencv is not good.
def draw_handpose_by_opencv(canvas, peaks, show_number=False):
    edges = [[0, 1], [1, 2], [2, 3], [3, 4], [0, 5], [5, 6], [6, 7], [7, 8], [0, 9], [9, 10], \
             [10, 11], [11, 12], [0, 13], [13, 14], [14, 15], [15, 16], [0, 17], [17, 18], [18, 19], [19, 20]]
    # cv2.rectangle(canvas, (x, y), (x+w, y+w), (0, 255, 0), 2, lineType=cv2.LINE_AA)
    # cv2.putText(canvas, 'left' if is_left else 'right', (x, y), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 0, 255), 2)
    for ie, e in enumerate(edges):
-         if np.sum(np.all(peaks[e], axis=1)==0) == 0:
+         if np.sum(np.all(peaks[e], axis=1) == 0) == 0:
            x1, y1 = peaks[e[0]]
            x2, y2 = peaks[e[1]]
-             cv2.line(canvas, (x1, y1), (x2, y2), matplotlib.colors.hsv_to_rgb([ie/float(len(edges)), 1.0, 1.0])*255, thickness=2)
+             cv2.line(canvas, (x1, y1), (x2, y2), matplotlib.colors.hsv_to_rgb([ie / float(len(edges)), 1.0, 1.0]) * 255,
+                      thickness=2)

    for i, keyponit in enumerate(peaks):
        x, y = keyponit
@@ -123,6 +150,7 @@ def draw_handpose_by_opencv(canvas, peaks, show_number=False):
            cv2.putText(canvas, str(i), (x, y), cv2.FONT_HERSHEY_SIMPLEX, 0.3, (0, 0, 0), lineType=cv2.LINE_AA)
    return canvas
+
# detect hand according to body pose keypoints
# please refer to https://github.com/CMU-Perceptual-Computing-Lab/openpose/blob/master/src/openpose/hand/handDetector.cpp
def handDetect(candidate, subset, oriImg):
@@ -138,7 +166,7 @@ def handDetect(candidate, subset, oriImg):
        if not (has_left or has_right):
            continue
        hands = []
-         #left hand
+         # left hand
        if has_left:
            left_shoulder_index, left_elbow_index, left_wrist_index = person[[5, 6, 7]]
            x1, y1 = candidate[left_shoulder_index][:2]
@@ -189,6 +217,7 @@ def handDetect(candidate, subset, oriImg):
    '''
    return detect_result
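# Editor's note: a hedged consumer sketch for handDetect, not part of this
# commit. Each entry of detect_result is [x, y, w, is_left]; the hand keypoint
# estimator passed in is an assumption about the surrounding project, expected
# to return an array of (x, y) peaks in crop coordinates.
def _crop_hands_example(candidate, subset, oriImg, hand_estimation):
    all_hand_peaks = []
    for x, y, w, is_left in handDetect(candidate, subset, oriImg):
        crop = oriImg[y:y + w, x:x + w, :]       # square region around the hand
        peaks = hand_estimation(crop)
        peaks[:, 0] += x                         # shift back to full-image coordinates
        peaks[:, 1] += y
        all_hand_peaks.append(peaks)
    return all_hand_peaks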
+
# get max index of 2d array
def npmax(array):
    arrayindex = array.argmax(1)