Vaibhavnaik12 committed
Upload cloth_masker.py
model/cloth_masker.py (+273, -0)

model/cloth_masker.py (ADDED)
@@ -0,0 +1,273 @@
import os
from PIL import Image
from typing import Union
import numpy as np
import cv2
from diffusers.image_processor import VaeImageProcessor
import torch

from model.SCHP import SCHP  # type: ignore
from model.DensePose import DensePose  # type: ignore

DENSE_INDEX_MAP = {
    "background": [0],
    "torso": [1, 2],
    "right hand": [3],
    "left hand": [4],
    "right foot": [5],
    "left foot": [6],
    "right thigh": [7, 9],
    "left thigh": [8, 10],
    "right leg": [11, 13],
    "left leg": [12, 14],
    "left big arm": [15, 17],
    "right big arm": [16, 18],
    "left forearm": [19, 21],
    "right forearm": [20, 22],
    "face": [23, 24],
    "thighs": [7, 8, 9, 10],
    "legs": [11, 12, 13, 14],
    "hands": [3, 4],
    "feet": [5, 6],
    "big arms": [15, 16, 17, 18],
    "forearms": [19, 20, 21, 22],
}

ATR_MAPPING = {
    'Background': 0, 'Hat': 1, 'Hair': 2, 'Sunglasses': 3,
    'Upper-clothes': 4, 'Skirt': 5, 'Pants': 6, 'Dress': 7,
    'Belt': 8, 'Left-shoe': 9, 'Right-shoe': 10, 'Face': 11,
    'Left-leg': 12, 'Right-leg': 13, 'Left-arm': 14, 'Right-arm': 15,
    'Bag': 16, 'Scarf': 17
}

LIP_MAPPING = {
    'Background': 0, 'Hat': 1, 'Hair': 2, 'Glove': 3,
    'Sunglasses': 4, 'Upper-clothes': 5, 'Dress': 6, 'Coat': 7,
    'Socks': 8, 'Pants': 9, 'Jumpsuits': 10, 'Scarf': 11,
    'Skirt': 12, 'Face': 13, 'Left-arm': 14, 'Right-arm': 15,
    'Left-leg': 16, 'Right-leg': 17, 'Left-shoe': 18, 'Right-shoe': 19
}

PROTECT_BODY_PARTS = {
    'upper': ['Left-leg', 'Right-leg'],
    'lower': ['Right-arm', 'Left-arm', 'Face'],
    'overall': [],
    'inner': ['Left-leg', 'Right-leg'],
    'outer': ['Left-leg', 'Right-leg'],
}
PROTECT_CLOTH_PARTS = {
    'upper': {
        'ATR': ['Skirt', 'Pants'],
        'LIP': ['Skirt', 'Pants']
    },
    'lower': {
        'ATR': ['Upper-clothes'],
        'LIP': ['Upper-clothes', 'Coat']
    },
    'overall': {
        'ATR': [],
        'LIP': []
    },
    'inner': {
        'ATR': ['Dress', 'Coat', 'Skirt', 'Pants'],
        'LIP': ['Dress', 'Coat', 'Skirt', 'Pants', 'Jumpsuits']
    },
    'outer': {
        'ATR': ['Dress', 'Pants', 'Skirt'],
        'LIP': ['Upper-clothes', 'Dress', 'Pants', 'Skirt', 'Jumpsuits']
    }
}
MASK_CLOTH_PARTS = {
    'upper': ['Upper-clothes', 'Coat', 'Dress', 'Jumpsuits'],
    'lower': ['Pants', 'Skirt', 'Dress', 'Jumpsuits'],
    'overall': ['Upper-clothes', 'Dress', 'Pants', 'Skirt', 'Coat', 'Jumpsuits'],
    'inner': ['Upper-clothes'],
    'outer': ['Coat',]
}
MASK_DENSE_PARTS = {
    'upper': ['torso', 'big arms', 'forearms'],
    'lower': ['thighs', 'legs'],
    'overall': ['torso', 'thighs', 'legs', 'big arms', 'forearms'],
    'inner': ['torso'],
    'outer': ['torso', 'big arms', 'forearms']
}

schp_public_protect_parts = ['Hat', 'Hair', 'Sunglasses', 'Left-shoe', 'Right-shoe', 'Bag', 'Glove', 'Scarf']
schp_protect_parts = {
    'upper': ['Left-leg', 'Right-leg', 'Skirt', 'Pants', 'Jumpsuits'],
    'lower': ['Left-arm', 'Right-arm', 'Upper-clothes', 'Coat'],
    'overall': [],
    'inner': ['Left-leg', 'Right-leg', 'Skirt', 'Pants', 'Jumpsuits', 'Coat'],
    'outer': ['Left-leg', 'Right-leg', 'Skirt', 'Pants', 'Jumpsuits', 'Upper-clothes']
}
schp_mask_parts = {
    'upper': ['Upper-clothes', 'Dress', 'Coat', 'Jumpsuits'],
    'lower': ['Pants', 'Skirt', 'Dress', 'Jumpsuits', 'socks'],
    'overall': ['Upper-clothes', 'Dress', 'Pants', 'Skirt', 'Coat', 'Jumpsuits', 'socks'],
    'inner': ['Upper-clothes'],
    'outer': ['Coat',]
}

dense_mask_parts = {
    'upper': ['torso', 'big arms', 'forearms'],
    'lower': ['thighs', 'legs'],
    'overall': ['torso', 'thighs', 'legs', 'big arms', 'forearms'],
    'inner': ['torso'],
    'outer': ['torso', 'big arms', 'forearms']
}

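# Note on the tables above: DENSE_INDEX_MAP groups the 24 DensePose surface-part
# indices (0 = background) into coarser named regions, while ATR_MAPPING and
# LIP_MAPPING mirror the label sets predicted by the two SCHP parsing checkpoints
# loaded in AutoMasker.__init__ (ATR and LIP). The PROTECT_* / MASK_* tables then
# pick, per garment type ('upper', 'lower', 'overall', 'inner', 'outer'), which
# labels must be preserved and which are candidates for masking.
# (schp_public_protect_parts, schp_protect_parts, schp_mask_parts and
# dense_mask_parts are defined here but not referenced elsewhere in this file.)
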
def vis_mask(image, mask):
    # Black out the region where `mask` is white, for quick visual inspection.
    image = np.array(image).astype(np.uint8)
    mask = np.array(mask).astype(np.uint8)
    mask[mask > 127] = 255
    mask[mask <= 127] = 0
    mask = np.expand_dims(mask, axis=-1)
    mask = np.repeat(mask, 3, axis=-1)
    mask = mask / 255
    return Image.fromarray((image * (1 - mask)).astype(np.uint8))

def part_mask_of(part: Union[str, list],
                 parse: np.ndarray, mapping: dict):
    # Union mask of the named part(s) in the label map `parse`, resolved through `mapping`.
    if isinstance(part, str):
        part = [part]
    mask = np.zeros_like(parse)
    for _ in part:
        if _ not in mapping:
            continue
        if isinstance(mapping[_], list):
            for i in mapping[_]:
                mask += (parse == i)
        else:
            mask += (parse == mapping[_])
    return mask

def hull_mask(mask_area: np.ndarray):
    # Replace each external contour of a binary mask with its filled convex hull.
    ret, binary = cv2.threshold(mask_area, 127, 255, cv2.THRESH_BINARY)
    contours, hierarchy = cv2.findContours(binary, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
    hull_mask = np.zeros_like(mask_area)
    for c in contours:
        hull = cv2.convexHull(c)
        hull_mask = cv2.fillPoly(np.zeros_like(mask_area), [hull], 255) | hull_mask
    return hull_mask

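# A minimal sketch of how the helpers above compose; the toy label map and the
# two-entry mapping below are made up for illustration only:
#
#   toy_parse = np.array([[0, 4, 4, 0],
#                         [0, 4, 4, 0],
#                         [0, 0, 6, 6]], dtype=np.uint8)
#   toy_map = {'Upper-clothes': 4, 'Pants': 6}
#   upper = part_mask_of('Upper-clothes', toy_parse, toy_map)        # 0/1 uint8 mask
#   hull = hull_mask((upper * 255).astype(np.uint8))                 # filled convex hull, 0/255
#   preview = vis_mask(np.stack([toy_parse * 40] * 3, axis=-1), hull)
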
class AutoMasker:
    def __init__(
        self,
        densepose_ckpt='./Models/DensePose',
        schp_ckpt='./Models/SCHP',
        device='cuda'):
        np.random.seed(0)
        torch.manual_seed(0)
        torch.cuda.manual_seed(0)

        self.densepose_processor = DensePose(densepose_ckpt, device)
        self.schp_processor_atr = SCHP(ckpt_path=os.path.join(schp_ckpt, 'exp-schp-201908301523-atr.pth'), device=device)
        self.schp_processor_lip = SCHP(ckpt_path=os.path.join(schp_ckpt, 'exp-schp-201908261155-lip.pth'), device=device)

        self.mask_processor = VaeImageProcessor(vae_scale_factor=8, do_normalize=False, do_binarize=True, do_convert_grayscale=True)

    def process_densepose(self, image_or_path):
        return self.densepose_processor(image_or_path, resize=1024)

    def process_schp_lip(self, image_or_path):
        return self.schp_processor_lip(image_or_path)

    def process_schp_atr(self, image_or_path):
        return self.schp_processor_atr(image_or_path)

    def preprocess_image(self, image_or_path):
        return {
            'densepose': self.densepose_processor(image_or_path, resize=1024),
            'schp_atr': self.schp_processor_atr(image_or_path),
            'schp_lip': self.schp_processor_lip(image_or_path)
        }

    @staticmethod
    def cloth_agnostic_mask(
        densepose_mask: Image.Image,
        schp_lip_mask: Image.Image,
        schp_atr_mask: Image.Image,
        part: str='overall',
        **kwargs
    ):
        assert part in ['upper', 'lower', 'overall', 'inner', 'outer'], f"part should be one of ['upper', 'lower', 'overall', 'inner', 'outer'], but got {part}"
        w, h = densepose_mask.size

        # Dilation kernel scaled to the image size (forced to an odd width).
        dilate_kernel = max(w, h) // 250
        dilate_kernel = dilate_kernel if dilate_kernel % 2 == 1 else dilate_kernel + 1
        dilate_kernel = np.ones((dilate_kernel, dilate_kernel), np.uint8)

        # Gaussian blur kernel size, about 1/15 of the longer side (forced odd).
        kernal_size = max(w, h) // 15
        kernal_size = kernal_size if kernal_size % 2 == 1 else kernal_size + 1

        densepose_mask = np.array(densepose_mask)
        schp_lip_mask = np.array(schp_lip_mask)
        schp_atr_mask = np.array(schp_atr_mask)

        # Strong Protect Area (Hands, Face, Accessory, Feet)
        hands_protect_area = part_mask_of(['hands', 'feet'], densepose_mask, DENSE_INDEX_MAP)
        hands_protect_area = cv2.dilate(hands_protect_area, dilate_kernel, iterations=1)
        hands_protect_area = hands_protect_area & \
            (part_mask_of(['Left-arm', 'Right-arm', 'Left-leg', 'Right-leg'], schp_atr_mask, ATR_MAPPING) | \
             part_mask_of(['Left-arm', 'Right-arm', 'Left-leg', 'Right-leg'], schp_lip_mask, LIP_MAPPING))
        face_protect_area = part_mask_of('Face', schp_lip_mask, LIP_MAPPING)

        strong_protect_area = hands_protect_area | face_protect_area

        # Weak Protect Area (Hair, Irrelevant Clothes, Body Parts)
        body_protect_area = part_mask_of(PROTECT_BODY_PARTS[part], schp_lip_mask, LIP_MAPPING) | part_mask_of(PROTECT_BODY_PARTS[part], schp_atr_mask, ATR_MAPPING)
        hair_protect_area = part_mask_of(['Hair'], schp_lip_mask, LIP_MAPPING) | \
            part_mask_of(['Hair'], schp_atr_mask, ATR_MAPPING)
        cloth_protect_area = part_mask_of(PROTECT_CLOTH_PARTS[part]['LIP'], schp_lip_mask, LIP_MAPPING) | \
            part_mask_of(PROTECT_CLOTH_PARTS[part]['ATR'], schp_atr_mask, ATR_MAPPING)
        accessory_protect_area = part_mask_of((accessory_parts := ['Hat', 'Glove', 'Sunglasses', 'Bag', 'Left-shoe', 'Right-shoe', 'Scarf', 'Socks']), schp_lip_mask, LIP_MAPPING) | \
            part_mask_of(accessory_parts, schp_atr_mask, ATR_MAPPING)
        weak_protect_area = body_protect_area | cloth_protect_area | hair_protect_area | strong_protect_area | accessory_protect_area

        # Mask Area
        strong_mask_area = part_mask_of(MASK_CLOTH_PARTS[part], schp_lip_mask, LIP_MAPPING) | \
            part_mask_of(MASK_CLOTH_PARTS[part], schp_atr_mask, ATR_MAPPING)
        background_area = part_mask_of(['Background'], schp_lip_mask, LIP_MAPPING) & part_mask_of(['Background'], schp_atr_mask, ATR_MAPPING)
        mask_dense_area = part_mask_of(MASK_DENSE_PARTS[part], densepose_mask, DENSE_INDEX_MAP)
        # Dilate the DensePose garment area at quarter resolution, then scale back up.
        mask_dense_area = cv2.resize(mask_dense_area.astype(np.uint8), None, fx=0.25, fy=0.25, interpolation=cv2.INTER_NEAREST)
        mask_dense_area = cv2.dilate(mask_dense_area, dilate_kernel, iterations=2)
        mask_dense_area = cv2.resize(mask_dense_area.astype(np.uint8), None, fx=4, fy=4, interpolation=cv2.INTER_NEAREST)

        # Everything that is neither weakly protected nor background, plus the dilated DensePose area.
        mask_area = (np.ones_like(densepose_mask) & (~weak_protect_area) & (~background_area)) | mask_dense_area

        mask_area = hull_mask(mask_area * 255) // 255  # Convex Hull to expand the mask area
        mask_area = mask_area & (~weak_protect_area)
        mask_area = cv2.GaussianBlur(mask_area * 255, (kernal_size, kernal_size), 0)
        mask_area[mask_area < 25] = 0
        mask_area[mask_area >= 25] = 1
        mask_area = (mask_area | strong_mask_area) & (~strong_protect_area)
        mask_area = cv2.dilate(mask_area, dilate_kernel, iterations=1)

        return Image.fromarray(mask_area * 255)

    def __call__(
        self,
        image: Union[str, Image.Image],
        mask_type: str = "upper",
    ):
        assert mask_type in ['upper', 'lower', 'overall', 'inner', 'outer'], f"mask_type should be one of ['upper', 'lower', 'overall', 'inner', 'outer'], but got {mask_type}"
        preprocess_results = self.preprocess_image(image)
        mask = self.cloth_agnostic_mask(
            preprocess_results['densepose'],
            preprocess_results['schp_lip'],
            preprocess_results['schp_atr'],
            part=mask_type,
        )
        return {
            'mask': mask,
            'densepose': preprocess_results['densepose'],
            'schp_lip': preprocess_results['schp_lip'],
            'schp_atr': preprocess_results['schp_atr']
        }

if __name__ == '__main__':
    pass
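A minimal usage sketch for the class above, assuming the DensePose and SCHP checkpoints have been downloaded to the default './Models/DensePose' and './Models/SCHP' paths, that a CUDA GPU is available (the class defaults to device='cuda'), and that 'person.jpg' stands in for any person image; the image and output filenames are placeholders, not files shipped with this commit:

    from PIL import Image
    from model.cloth_masker import AutoMasker, vis_mask

    masker = AutoMasker(
        densepose_ckpt='./Models/DensePose',
        schp_ckpt='./Models/SCHP',
        device='cuda',
    )
    result = masker('person.jpg', mask_type='upper')   # or 'lower', 'overall', 'inner', 'outer'
    result['mask'].save('agnostic_mask.png')           # binary PIL mask (0/255)

    # Optional: overlay the mask on the input to eyeball the agnostic region.
    # The source image is resized to the mask size first, since the parsers
    # may work at a different resolution than the original input.
    person = Image.open('person.jpg').resize(result['mask'].size)
    vis_mask(person, result['mask']).save('agnostic_preview.png')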