Rupesx007 committed
Commit ff941e1 · verified · 1 Parent(s): eb7bdc0

uploaded the main.py and the trained model

Files changed (2)
  1. ResNet152V2.h5 +3 -0
  2. main.py +42 -0
ResNet152V2.h5 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:808a967828894055c50485d20212089f983333be723e3aeea2acd21becb0a5b2
+ size 240580936
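
The .h5 file is tracked with Git LFS, so the diff above shows only the three-line pointer (spec version, SHA-256 object id, and byte size); the roughly 240 MB of weights are stored out of band. Below is a minimal sanity-check sketch, assuming the real file has been pulled into the repository root: it compares the downloaded weights against the digest and size recorded in the pointer.

import hashlib

# Values copied from the LFS pointer above
EXPECTED_SHA256 = "808a967828894055c50485d20212089f983333be723e3aeea2acd21becb0a5b2"
EXPECTED_SIZE = 240580936  # bytes

sha256 = hashlib.sha256()
size = 0
with open("ResNet152V2.h5", "rb") as f:  # path assumed relative to the checkout root
    for chunk in iter(lambda: f.read(1 << 20), b""):
        sha256.update(chunk)
        size += len(chunk)

assert size == EXPECTED_SIZE, "unexpected file size"
assert sha256.hexdigest() == EXPECTED_SHA256, "sha256 digest mismatch"
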
main.py ADDED
@@ -0,0 +1,42 @@
+ import streamlit as st
+ from PIL import Image
+ import tensorflow as tf
+ import numpy as np
+ import io
+
+ # Load the trained model; custom_objects maps the BatchNormalization layer used when the model was saved
+ custom_objects = {'BatchNormalization': tf.keras.layers.BatchNormalization}
+ model = tf.keras.models.load_model('ResNet152V2.h5', custom_objects=custom_objects)
+
+
+ # Define class labels of the animals
+ class_labels = ['Butterfly', 'Cat', 'Cow', 'Dog', 'Hen']
+
+ # Streamlit App
+ st.title("Image Classification App")
+
+ # Upload image through Streamlit interface
+ uploaded_file = st.file_uploader("Choose an image...", type="jpg")
+
+ if uploaded_file is not None:
+     # Read the bytes of the uploaded file
+     image_bytes = uploaded_file.read()
+
+     # Convert the bytes to a PIL Image (forced to RGB) and display it
+     image = Image.open(io.BytesIO(image_bytes)).convert("RGB")
+     st.image(image, caption="Uploaded Image", use_column_width=True)
+
+     # Preprocess the image for the model
+     image = image.resize((256, 256))  # Must match the model's expected input size
+     image_array = tf.keras.preprocessing.image.img_to_array(image)
+     image_array = np.expand_dims(image_array, axis=0)
+     image_array /= 255.0  # Normalize the pixel values to be between 0 and 1
+
+     # Make predictions
+     predictions = model.predict(image_array)
+     predicted_class = np.argmax(predictions[0])
+     confidence = predictions[0][predicted_class]
+
+     # Display the predicted class and confidence
+     st.write("Prediction:")
+     st.write(f"Class: {class_labels[predicted_class]}, Confidence: {confidence:.2f}")
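
Because Streamlit re-executes the entire script on every widget interaction, the module-level load_model call above re-reads the ~240 MB ResNet152V2.h5 on each rerun. A minimal sketch of one way to avoid that, assuming Streamlit 1.18 or newer where st.cache_resource is available (an optional refinement, not part of this commit):

import streamlit as st
import tensorflow as tf

@st.cache_resource
def get_model():
    # Executed once per server process; later reruns reuse the cached model object
    return tf.keras.models.load_model(
        'ResNet152V2.h5',
        custom_objects={'BatchNormalization': tf.keras.layers.BatchNormalization},
    )

model = get_model()

The app itself is started as usual with streamlit run main.py; with the cache in place, the model loads once on the first page view and stays in memory for subsequent uploads.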