franz96521 committed · Commit 85a5368 · Parent(s): ad5cb02

camera

billetes.ipynb CHANGED (+96 -48)
@@ -2,7 +2,7 @@
  "cells": [
   {
    "cell_type": "code",
-   "execution_count":
+   "execution_count": 2,
    "metadata": {},
    "outputs": [
     {
@@ -34,7 +34,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count":
+   "execution_count": 3,
    "metadata": {},
    "outputs": [],
    "source": [
@@ -49,7 +49,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count":
+   "execution_count": 4,
    "metadata": {},
    "outputs": [
     {
@@ -85,7 +85,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count":
+   "execution_count": 5,
    "metadata": {},
    "outputs": [
     {
@@ -105,7 +105,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count":
+   "execution_count": 6,
    "metadata": {},
    "outputs": [],
    "source": [
@@ -134,7 +134,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count":
+   "execution_count": 7,
    "metadata": {},
    "outputs": [
     {
@@ -182,7 +182,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count":
+   "execution_count": 8,
    "metadata": {},
    "outputs": [],
    "source": [
@@ -200,11 +200,22 @@
   },
   {
    "cell_type": "code",
-   "execution_count":
+   "execution_count": 9,
    "metadata": {},
-   "outputs": [
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       "<tensorflow.python.training.tracking.util.CheckpointLoadStatus at 0x1f2f632d0d0>"
+      ]
+     },
+     "execution_count": 9,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
    "source": [
-    "model.load_weights((weights_path+'/
+    "model.load_weights((weights_path+'/weights2'))"
    ]
   },
   {
@@ -305,14 +316,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count":
-   "metadata": {},
-   "outputs": [],
-   "source": []
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 15,
+   "execution_count": 19,
    "metadata": {},
    "outputs": [],
    "source": [
@@ -328,7 +332,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count":
+   "execution_count": 18,
    "metadata": {},
    "outputs": [
     {
@@ -341,10 +345,10 @@
    {
     "data": {
      "text/plain": [
-      "<matplotlib.image.AxesImage at
+      "<matplotlib.image.AxesImage at 0x1f2f8294910>"
      ]
     },
-    "execution_count":
+    "execution_count": 18,
    "metadata": {},
    "output_type": "execute_result"
   },
@@ -371,7 +375,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count":
+   "execution_count": 15,
    "metadata": {},
    "outputs": [
     {
@@ -380,7 +384,7 @@
      "'50'"
     ]
    },
-   "execution_count":
+   "execution_count": 15,
    "metadata": {},
    "output_type": "execute_result"
   }
@@ -393,50 +397,94 @@
    ]
   },
   {
-   "cell_type": "
+   "cell_type": "code",
+   "execution_count": 22,
    "metadata": {},
+   "outputs": [],
    "source": [
-    "
+    "def get_prediction(img):\n",
+    "    img = np.array(img)/255.0\n",
+    "    #print(img.shape)\n",
+    "    #plt.imshow(img)\n",
+    "    result = model.predict(img[np.newaxis,...])\n",
+    "    result.shape\n",
+    "    predicted_class = tf.math.argmax(result[0], axis=-1)\n",
+    "    return class_names[int(predicted_class)]"
    ]
   },
   {
    "cell_type": "code",
-   "execution_count":
+   "execution_count": 20,
    "metadata": {},
    "outputs": [
     {
      "name": "stdout",
      "output_type": "stream",
      "text": [
-      "
+      "(224, 224, 3)\n"
      ]
+    },
+    {
+     "data": {
+      "text/plain": [
+       "'50'"
+      ]
+     },
+     "execution_count": 20,
+     "metadata": {},
+     "output_type": "execute_result"
     }
    ],
    "source": [
-    "
+    "img = Image.open('test.jpg').resize((224,224))\n",
     "\n",
+    "get_prediction(img)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# camera input"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 26,
+   "metadata": {},
+   "outputs": [],
+   "source": [
    "import cv2\n",
-   "captura = cv2.VideoCapture(0)\n",
-   "captura.set(cv2.CAP_PROP_FRAME_WIDTH, 951)\n",
-   "captura.set(cv2.CAP_PROP_FRAME_HEIGHT, 524)\n",
-   "while (captura.isOpened()):\n",
-   "    ret, imagen = captura.read()\n",
-   "    if ret == True:\n",
-   "        # color_coverted = cv2.cvtColor(imagen, cv2.COLOR_BGR2RGB)\n",
-   "        img=Image.fromarray(imagen)\n",
    "\n",
-   "
-   "
-   "
-   "
-   "
-   "
-   "
-   "
-   "
-   "
-   "
-   "
+   "\n",
+   "def funcion(img):\n",
+   "    gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)\n",
+   "    gray = cv2.GaussianBlur(gray, (21, 21), 0)\n",
+   "\n",
+   "    ret, imgt = cv2.threshold(gray, 138, 255, cv2.THRESH_BINARY_INV)\n",
+   "\n",
+   "    cv2.imshow(\"Image threshold\", imgt)\n",
+   "    countours, hierarchy = cv2.findContours(imgt.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)\n",
+   "    rectangles = [cv2.boundingRect(countour) for countour in countours]\n",
+   "    c = 0\n",
+   "    for i , rect in enumerate(rectangles):\n",
+   "        if rect[2] > 100 and rect[3] > 100:\n",
+   "            imgn = img[rect[1]:rect[1] + rect[3], rect[0]:rect[0] + rect[2]]\n",
+   "            imgn = cv2.resize(imgn, (100, 100))\n",
+   "            c += 1 \n",
+   "\n",
+   "            cv2.rectangle(img, (rect[0], rect[1]), (rect[0] + rect[2], rect[1] + rect[3]), (255, 0, 0), 2)\n",
+   "            cv2.putText(img, str(get_prediction(imgn)), (rect[0], rect[1]), cv2.FONT_HERSHEY_SIMPLEX, 1, (200, 0, 0), 3, cv2.LINE_AA)\n",
+   "    return img\n",
+   "\n",
+   "cam = cv2.VideoCapture(0)\n",
+   "while True:\n",
+   "    val, img = cam.read()\n",
+   "    img = funcion(img)\n",
+   "    cv2.imshow(\"Image funcion\",img)\n",
+   "    if cv2.waitKey(1) & 0xFF == ord('q'):\n",
+   "        break\n",
+   "cam.release()\n",
    "cv2.destroyAllWindows()"
    ]
   },
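Note on the `@@ -200,11 +200,22 @@` hunk: the doubled parentheses in `model.load_weights((weights_path+'/weights2'))` are redundant but harmless, and with TF-format checkpoints the call returns the `CheckpointLoadStatus` object echoed in the new cell output. A minimal sketch of using that status instead of discarding it, assuming the `model` and `weights_path` defined in earlier cells of this notebook:

```python
# Sketch, not part of the commit: `model` and `weights_path` come from
# earlier notebook cells; 'weights2' is the checkpoint prefix used here.
status = model.load_weights(weights_path + '/weights2')

# The returned CheckpointLoadStatus (the object shown in the cell output)
# can assert the restore worked; expect_partial() also silences warnings
# about checkpoint values the model does not use (e.g. optimizer slots).
status.expect_partial()
```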
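For readability, here is the `get_prediction` helper added in the last hunk, restated with comments. The committed cell's bare `result.shape` line is a no-op and is dropped; everything else is unchanged. This is a sketch assuming `model` and `class_names` from earlier cells of the notebook:

```python
import numpy as np
import tensorflow as tf
from PIL import Image

def get_prediction(img):
    # Expects an image already resized to the model input
    # (224x224 in the committed test cell).
    img = np.array(img) / 255.0                    # scale pixels to [0, 1]
    result = model.predict(img[np.newaxis, ...])   # add the batch dimension
    predicted_class = tf.math.argmax(result[0], axis=-1)
    return class_names[int(predicted_class)]

# Usage, as in the committed test cell:
img = Image.open('test.jpg').resize((224, 224))
print(get_prediction(img))                         # prints '50' in the recorded run
```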
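Two caveats on the new camera cell. First, the crops handed to `get_prediction` are resized to (100, 100), while the helper was exercised above with 224x224 input; unless the model input really is 100x100, that predict call will fail on shape. Second, OpenCV frames are BGR while the test path fed the model RGB (PIL) images; notably, the removed cell carried a commented-out `COLOR_BGR2RGB` conversion. Below is a cleaned-up sketch of the same pipeline with both points addressed; the 224x224 size and the BGR-to-RGB conversion are assumptions, not part of the commit, and `get_prediction` comes from the cell above (OpenCV 4.x assumed):

```python
import cv2

def annotate(img):
    # Same pipeline as the committed `funcion`: blur, inverse threshold,
    # external contours, then classify every sufficiently large box.
    gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
    gray = cv2.GaussianBlur(gray, (21, 21), 0)
    _, mask = cv2.threshold(gray, 138, 255, cv2.THRESH_BINARY_INV)
    contours, _ = cv2.findContours(mask, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
    for x, y, w, h in (cv2.boundingRect(c) for c in contours):
        if w > 100 and h > 100:                            # skip small blobs
            crop = cv2.resize(img[y:y + h, x:x + w], (224, 224))
            crop = cv2.cvtColor(crop, cv2.COLOR_BGR2RGB)   # model saw RGB images above
            label = get_prediction(crop)
            cv2.rectangle(img, (x, y), (x + w, y + h), (255, 0, 0), 2)
            cv2.putText(img, str(label), (x, y), cv2.FONT_HERSHEY_SIMPLEX,
                        1, (200, 0, 0), 3, cv2.LINE_AA)
    return img

cam = cv2.VideoCapture(0)
while True:
    ok, frame = cam.read()
    if not ok:                                             # camera gone, stop cleanly
        break
    cv2.imshow("Image funcion", annotate(frame))
    if cv2.waitKey(1) & 0xFF == ord('q'):
        break
cam.release()
cv2.destroyAllWindows()
```

The unused `c` counter and the "Image threshold" debug window from the committed cell are dropped here; keep that debug `imshow` if the threshold value (138) needs tuning for the lighting.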