Modalities: Text
Formats: text
Libraries: Datasets
License:

AleRu12 committed · commit fff4be9 · 1 parent: 90fc663

dataset split script

Files changed (1): dataset_split_generator.py (+345, -0)
dataset_split_generator.py ADDED

from os import listdir
from os.path import isdir, join, splitext, exists
from random import seed, randint, choice
import re
import json

import argparse

parser = argparse.ArgumentParser(description='Generate BBBicycles split.')
parser.add_argument('-p', '--path', type=str, required=True,
                    help='directory containing the ID folders')

args = parser.parse_args()
path = args.path

seed(1337)  # fix the RNG so the generated split is reproducible
train_val_bike_type_split = 10  # IDs drawing randint(0, 100) <= this value go to validation

img_regex = re.compile(r'(^img.\d*[.]png$)')
dir_regex = re.compile(r'(^\w+_)')

train = open("bike_train.txt", "w")
query_v = open("bike_query_val.txt", "w")
galley_v = open("bike_gallery_val.txt", "w")
query_t = open("bike_query_test.txt", "w")
galley_t = open("bike_gallery_test.txt", "w")

# Whole-dataset statistics
num_ids = 0
num_imgs = 0
num_damaged_imgs = 0
num_broken_imgs = 0
num_bent_imgs = 0
num_missingpart_imgs = 0
nums_missingpart_imgs = [0 for i in range(5)]
models_dist = {}

# Training-split statistics
num_train_ids = 0
num_train_imgs = 0
num_damaged_train_imgs = 0
num_broken_train_imgs = 0
num_bent_train_imgs = 0
num_missingpart_train_imgs = 0
nums_missingpart_train_imgs = [0 for i in range(5)]
models_dist_train = {}

# Validation-split statistics
num_val_ids = 0
num_val_imgs = 0
num_damaged_val_imgs = 0
num_broken_val_imgs = 0
num_bent_val_imgs = 0
num_missingpart_val_imgs = 0
nums_missingpart_val_imgs = [0 for i in range(5)]
models_dist_val = {}

# Test-split statistics
num_test_ids = 0
num_test_imgs = 0
num_damaged_test_imgs = 0
num_broken_test_imgs = 0
num_bent_test_imgs = 0
num_missingpart_test_imgs = 0
nums_missingpart_test_imgs = [0 for i in range(5)]
models_dist_test = {}
for id, bike in enumerate(listdir(path)):
    # Keep only ID folders with non-empty "before"/"after" subfolders and a fixed_data.json
    if isdir(join(path, bike)) and dir_regex.match(bike) and exists(join(path, bike, "before")) and len(listdir(join(path, bike, "before"))) != 0 and exists(join(path, bike, "after")) and len(listdir(join(path, bike, "after"))) != 0 and exists(join(path, bike, "fixed_data.json")):
        # TODO: read the identity's JSON to extract its info
        json_fixed = open(join(path, bike, 'fixed_data.json'))
        data_fixed = json.load(json_fixed)

        if str(data_fixed['Bike Type']) not in models_dist:
            models_dist[str(data_fixed['Bike Type'])] = {str(data_fixed['Model']): 1}
        elif str(data_fixed['Model']) not in models_dist[str(data_fixed['Bike Type'])]:
            models_dist[str(data_fixed['Bike Type'])][str(data_fixed['Model'])] = 1
        else:
            models_dist[str(data_fixed['Bike Type'])][str(data_fixed['Model'])] = models_dist[str(data_fixed['Bike Type'])][str(data_fixed['Model'])] + 1
        c = 0
        num_ids = num_ids + 1
        for type in ["before", "after"]:
            for file in listdir(join(path, bike, type)):
                if img_regex.match(file):
                    if exists(join(path, bike, type, splitext(file)[0] + '_variable.json')):
                        json_var = open(join(path, bike, type, splitext(file)[0] + '_variable.json'))
                        data = json.load(json_var)

                        dmgid = 0 if type == "before" else int(data["Damage Type"])
                        missid = "00000" if type == "before" else str(data["Removed Parts"])

                        json_var.close()

                        if dmgid != 0:
                            num_damaged_imgs = num_damaged_imgs + 1
                        if dmgid == 2 or dmgid == 3:
                            num_broken_imgs = num_broken_imgs + 1
                        if dmgid == 1 or dmgid == 3:
                            num_bent_imgs = num_bent_imgs + 1
                        if missid != "00000":
                            num_missingpart_imgs = num_missingpart_imgs + 1
                            for i in range(5):
                                if missid[i] == "1":
                                    nums_missingpart_imgs[i] = nums_missingpart_imgs[i] + 1
                        num_imgs = num_imgs + 1
                        c = c + 1
                    else:
                        print(bike)
        if c != 14:
            # warn if the identity does not have exactly 14 annotated images
            print(bike)
        if str(data_fixed['Model']) in ['mfactory ', 'ghost', 'oldbike', 'rondo', 'verdona']:
            # Train: models reserved entirely for the training split
            if str(data_fixed['Bike Type']) not in models_dist_train:
                models_dist_train[str(data_fixed['Bike Type'])] = {str(data_fixed['Model']): 1}
            elif str(data_fixed['Model']) not in models_dist_train[str(data_fixed['Bike Type'])]:
                models_dist_train[str(data_fixed['Bike Type'])][str(data_fixed['Model'])] = 1
            else:
                models_dist_train[str(data_fixed['Bike Type'])][str(data_fixed['Model'])] = models_dist_train[str(data_fixed['Bike Type'])][str(data_fixed['Model'])] + 1

            num_train_ids = num_train_ids + 1
            for type in ["before", "after"]:
                for file in listdir(join(path, bike, type)):
                    if img_regex.match(file) and exists(join(path, bike, type, splitext(file)[0] + '_variable.json')):
                        img_path = join(bike, type, file)

                        json_var = open(join(path, bike, type, splitext(file)[0] + '_variable.json'))
                        data = json.load(json_var)

                        camid = int(data["Focal Length"])
                        viewid = int(data["Viewing Side"])
                        dmgid = 0 if type == "before" else int(data["Damage Type"])
                        missid = "00000" if type == "before" else str(data["Removed Parts"])

                        train.write("{} {} {} {} {} {}\n".format(img_path, id, camid, viewid, dmgid, missid))
                        json_var.close()

                        if dmgid != 0:
                            num_damaged_train_imgs = num_damaged_train_imgs + 1
                        if dmgid == 2 or dmgid == 3:
                            num_broken_train_imgs = num_broken_train_imgs + 1
                        if dmgid == 1 or dmgid == 3:
                            num_bent_train_imgs = num_bent_train_imgs + 1
                        if missid != "00000":
                            num_missingpart_train_imgs = num_missingpart_train_imgs + 1
                            for i in range(5):
                                if missid[i] == "1":
                                    nums_missingpart_train_imgs[i] = nums_missingpart_train_imgs[i] + 1
                        num_train_imgs = num_train_imgs + 1
        else:
            if str(data_fixed['Model']) not in ['mirage', 'gbike', 'enduro']:
                if str(data_fixed['Model']) not in ['becane', 'btwin', 'croad'] and randint(0, 100) > train_val_bike_type_split:
                    # Train: remaining models, roughly 90% of their IDs
                    if str(data_fixed['Bike Type']) not in models_dist_train:
                        models_dist_train[str(data_fixed['Bike Type'])] = {str(data_fixed['Model']): 1}
                    elif str(data_fixed['Model']) not in models_dist_train[str(data_fixed['Bike Type'])]:
                        models_dist_train[str(data_fixed['Bike Type'])][str(data_fixed['Model'])] = 1
                    else:
                        models_dist_train[str(data_fixed['Bike Type'])][str(data_fixed['Model'])] = models_dist_train[str(data_fixed['Bike Type'])][str(data_fixed['Model'])] + 1

                    num_train_ids = num_train_ids + 1
                    for type in ["before", "after"]:
                        for file in listdir(join(path, bike, type)):
                            if img_regex.match(file) and exists(join(path, bike, type, splitext(file)[0] + '_variable.json')):
                                img_path = join(bike, type, file)

                                json_var = open(join(path, bike, type, splitext(file)[0] + '_variable.json'))
                                data = json.load(json_var)

                                camid = int(data["Focal Length"])
                                viewid = int(data["Viewing Side"])
                                dmgid = 0 if type == "before" else int(data["Damage Type"])
                                missid = "00000" if type == "before" else str(data["Removed Parts"])

                                train.write("{} {} {} {} {} {}\n".format(img_path, id, camid, viewid, dmgid, missid))
                                json_var.close()

                                if dmgid != 0:
                                    num_damaged_train_imgs = num_damaged_train_imgs + 1
                                if dmgid == 2 or dmgid == 3:
                                    num_broken_train_imgs = num_broken_train_imgs + 1
                                if dmgid == 1 or dmgid == 3:
                                    num_bent_train_imgs = num_bent_train_imgs + 1
                                if missid != "00000":
                                    num_missingpart_train_imgs = num_missingpart_train_imgs + 1
                                    for i in range(5):
                                        if missid[i] == "1":
                                            nums_missingpart_train_imgs[i] = nums_missingpart_train_imgs[i] + 1
                                num_train_imgs = num_train_imgs + 1
                else:
                    # Val: 'becane', 'btwin', 'croad' plus roughly 10% of the remaining IDs
                    if str(data_fixed['Bike Type']) not in models_dist_val:
                        models_dist_val[str(data_fixed['Bike Type'])] = {str(data_fixed['Model']): 1}
                    elif str(data_fixed['Model']) not in models_dist_val[str(data_fixed['Bike Type'])]:
                        models_dist_val[str(data_fixed['Bike Type'])][str(data_fixed['Model'])] = 1
                    else:
                        models_dist_val[str(data_fixed['Bike Type'])][str(data_fixed['Model'])] = models_dist_val[str(data_fixed['Bike Type'])][str(data_fixed['Model'])] + 1

                    num_val_ids = num_val_ids + 1
                    for type in ["before", "after"]:
                        # one random image per side: "before" goes to the gallery, "after" to the query
                        files = [f for f in listdir(join(path, bike, type)) if img_regex.match(f) and exists(join(path, bike, type, splitext(f)[0] + '_variable.json'))]
                        file = choice(files)
                        img_path = join(bike, type, file)

                        json_var = open(join(path, bike, type, splitext(file)[0] + '_variable.json'))
                        data = json.load(json_var)

                        camid = int(data["Focal Length"])
                        viewid = int(data["Viewing Side"])
                        dmgid = 0 if type == "before" else int(data["Damage Type"])
                        missid = "00000" if type == "before" else str(data["Removed Parts"])

                        if type == "before":
                            galley_v.write("{} {} {} {} {} {}\n".format(img_path, id, camid, viewid, dmgid, missid))
                        else:
                            query_v.write("{} {} {} {} {} {}\n".format(img_path, id, camid, viewid, dmgid, missid))
                        json_var.close()

                        if dmgid != 0:
                            num_damaged_val_imgs = num_damaged_val_imgs + 1
                        if dmgid == 2 or dmgid == 3:
                            num_broken_val_imgs = num_broken_val_imgs + 1
                        if dmgid == 1 or dmgid == 3:
                            num_bent_val_imgs = num_bent_val_imgs + 1
                        if missid != "00000":
                            num_missingpart_val_imgs = num_missingpart_val_imgs + 1
                            for i in range(5):
                                if missid[i] == "1":
                                    nums_missingpart_val_imgs[i] = nums_missingpart_val_imgs[i] + 1
                        num_val_imgs = num_val_imgs + 1
            else:
                # Test: held-out models 'mirage', 'gbike', 'enduro'
                if str(data_fixed['Bike Type']) not in models_dist_test:
                    models_dist_test[str(data_fixed['Bike Type'])] = {str(data_fixed['Model']): 1}
                elif str(data_fixed['Model']) not in models_dist_test[str(data_fixed['Bike Type'])]:
                    models_dist_test[str(data_fixed['Bike Type'])][str(data_fixed['Model'])] = 1
                else:
                    models_dist_test[str(data_fixed['Bike Type'])][str(data_fixed['Model'])] = models_dist_test[str(data_fixed['Bike Type'])][str(data_fixed['Model'])] + 1

                num_test_ids = num_test_ids + 1
                for type in ["before", "after"]:
                    # one random image per side: "before" goes to the gallery, "after" to the query
                    files = [f for f in listdir(join(path, bike, type)) if img_regex.match(f) and exists(join(path, bike, type, splitext(f)[0] + '_variable.json'))]
                    if not files:
                        print(join(path, bike, type))
                    file = choice(files)
                    img_path = join(bike, type, file)

                    json_var = open(join(path, bike, type, splitext(file)[0] + '_variable.json'))
                    data = json.load(json_var)

                    camid = int(data["Focal Length"])
                    viewid = int(data["Viewing Side"])
                    dmgid = 0 if type == "before" else int(data["Damage Type"])
                    missid = "00000" if type == "before" else str(data["Removed Parts"])

                    if type == "before":
                        galley_t.write("{} {} {} {} {} {}\n".format(img_path, id, camid, viewid, dmgid, missid))
                    else:
                        query_t.write("{} {} {} {} {} {}\n".format(img_path, id, camid, viewid, dmgid, missid))
                    json_var.close()

                    if dmgid != 0:
                        num_damaged_test_imgs = num_damaged_test_imgs + 1
                    if dmgid == 2 or dmgid == 3:
                        num_broken_test_imgs = num_broken_test_imgs + 1
                    if dmgid == 1 or dmgid == 3:
                        num_bent_test_imgs = num_bent_test_imgs + 1
                    if missid != "00000":
                        num_missingpart_test_imgs = num_missingpart_test_imgs + 1
                        for i in range(5):
                            if missid[i] == "1":
                                nums_missingpart_test_imgs[i] = nums_missingpart_test_imgs[i] + 1
                    num_test_imgs = num_test_imgs + 1
        json_fixed.close()
    else:
        print(bike)
train.close()
query_v.close()
galley_v.close()
query_t.close()
galley_t.close()

data = {}
data["General"] = []
data["General"].append({
    'Num IDs': num_ids,
    'Num Bike types': len(models_dist.keys()),
    'Num Models': sum(len(models_dist[k].keys()) for k in models_dist.keys()),
    'Num images': num_imgs,
    'Num bent images': num_bent_imgs,
    'Num broken images': num_broken_imgs,
    'Num damaged images': num_damaged_imgs,
    'Num images with missing parts': num_missingpart_imgs,
    'Num images with missing Front Wheel': nums_missingpart_imgs[0],
    'Num images with missing Rear Wheel': nums_missingpart_imgs[1],
    'Num images with missing Seat': nums_missingpart_imgs[2],
    'Num images with missing Handlebar': nums_missingpart_imgs[3],
    'Num images with missing Pedals': nums_missingpart_imgs[4]
})
data["Train"] = []
data["Train"].append({
    'Num IDs': num_train_ids,
    'Num Bike types': len(models_dist_train.keys()),
    'Num Models': sum(len(models_dist_train[k].keys()) for k in models_dist_train.keys()),
    'Num images': num_train_imgs,
    'Num bent images': num_bent_train_imgs,
    'Num broken images': num_broken_train_imgs,
    'Num damaged images': num_damaged_train_imgs,
    'Num images with missing parts': num_missingpart_train_imgs,
    'Num images with missing Front Wheel': nums_missingpart_train_imgs[0],
    'Num images with missing Rear Wheel': nums_missingpart_train_imgs[1],
    'Num images with missing Seat': nums_missingpart_train_imgs[2],
    'Num images with missing Handlebar': nums_missingpart_train_imgs[3],
    'Num images with missing Pedals': nums_missingpart_train_imgs[4]
})
data["Validation"] = []
data["Validation"].append({
    'Num IDs': num_val_ids,
    'Num Bike types': len(models_dist_val.keys()),
    'Num Models': sum(len(models_dist_val[k].keys()) for k in models_dist_val.keys()),
    'Num images': num_val_imgs,
    'Num bent images': num_bent_val_imgs,
    'Num broken images': num_broken_val_imgs,
    'Num damaged images': num_damaged_val_imgs,
    'Num images with missing parts': num_missingpart_val_imgs,
    'Num images with missing Front Wheel': nums_missingpart_val_imgs[0],
    'Num images with missing Rear Wheel': nums_missingpart_val_imgs[1],
    'Num images with missing Seat': nums_missingpart_val_imgs[2],
    'Num images with missing Handlebar': nums_missingpart_val_imgs[3],
    'Num images with missing Pedals': nums_missingpart_val_imgs[4]
})
data["Test"] = []
data["Test"].append({
    'Num IDs': num_test_ids,
    'Num Bike types': len(models_dist_test.keys()),
    'Num Models': sum(len(models_dist_test[k].keys()) for k in models_dist_test.keys()),
    'Num images': num_test_imgs,
    'Num bent images': num_bent_test_imgs,
    'Num broken images': num_broken_test_imgs,
    'Num damaged images': num_damaged_test_imgs,
    'Num images with missing parts': num_missingpart_test_imgs,
    'Num images with missing Front Wheel': nums_missingpart_test_imgs[0],
    'Num images with missing Rear Wheel': nums_missingpart_test_imgs[1],
    'Num images with missing Seat': nums_missingpart_test_imgs[2],
    'Num images with missing Handlebar': nums_missingpart_test_imgs[3],
    'Num images with missing Pedals': nums_missingpart_test_imgs[4]
})

with open('bike_current_split_stats.json', 'w', encoding='utf-8') as f:
    json.dump(data, f, ensure_ascii=False, indent=4)
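
For reference, the generator is run as "python dataset_split_generator.py -p <dataset_root>" and each split file stores one image per line in the order written above: img_path id camid viewid dmgid missid. The sketch below is one possible way to read such a file back into Python; it is only an illustration of that record layout, and the SplitRecord and load_split names are hypothetical helpers, not part of the dataset tooling.

# Minimal parsing sketch for the split files produced above (hypothetical helper,
# not part of the repository). Field order mirrors the write() calls:
# img_path id camid viewid dmgid missid.
from typing import List, NamedTuple


class SplitRecord(NamedTuple):
    img_path: str   # relative path: <ID folder>/<before|after>/<image file>
    pid: int        # identity index assigned by enumerate() in the generator
    camid: int      # "Focal Length" value from the *_variable.json
    viewid: int     # "Viewing Side" value
    dmgid: int      # damage code; the counters above treat 1/3 as bent and 2/3 as broken
    missid: str     # 5-character removed-parts bitmask, "00000" means nothing removed


def load_split(split_path: str) -> List[SplitRecord]:
    records = []
    with open(split_path) as f:
        for line in f:
            parts = line.split()
            if not parts:
                continue  # skip blank lines, if any
            img_path, pid, camid, viewid, dmgid, missid = parts
            records.append(SplitRecord(img_path, int(pid), int(camid),
                                       int(viewid), int(dmgid), missid))
    return records


if __name__ == "__main__":
    # Example usage on the training split written by the script above.
    for rec in load_split("bike_train.txt")[:5]:
        print(rec)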