LuckyHappyFish committed
Commit df033c3 · 1 Parent: 07326b1
I'm struggling
app.py
CHANGED
@@ -19,9 +19,8 @@ def local_css():
     """
     <style>
     /* Main layout */
-    .main {
-        background-color: #f0f2f6;
-    }
+    .main { background-color: #f0f2f6; }
+
     /* Title styling */
     .title h1 {
         font-family: 'Segoe UI', Tahoma, Geneva, Verdana, sans-serif;
@@ -30,22 +29,21 @@ def local_css():
         font-size: 3rem;
         margin-bottom: 20px;
     }
+
     /* Image styling */
     .st-image img {
         border-radius: 15px;
         margin-bottom: 20px;
         max-width: 100%;
     }
+
     /* Sidebar styling */
     [data-testid="stSidebar"] {
         background-color: #ff4b4b;
     }
-    [data-testid="stSidebar"] .css-ng1t4o {
-        color: white;
-    }
-    [data-testid="stSidebar"] .css-1d391kg {
-        color: white;
-    }
+    [data-testid="stSidebar"] .css-ng1t4o { color: white; }
+    [data-testid="stSidebar"] .css-1d391kg { color: white; }
+
     /* File uploader styling */
     .stFileUploader {
         border: 2px dashed #ff4b4b;
@@ -56,10 +54,12 @@ def local_css():
         background-color: #ffffff;
         font-weight: bold;
     }
+
     /* File uploader hover effect */
     .stFileUploader:hover {
         background-color: #ffe5e5;
     }
+
     /* Button styling */
     .stButton>button {
         background-color: #ff4b4b;
@@ -75,40 +75,25 @@ def local_css():
         background-color: #e04343;
         color: white;
     }
+
     /* Headers styling */
-    h2 {
-        color: #ff4b4b;
-        margin-top: 30px;
-        font-family: 'Segoe UI', Tahoma, Geneva, Verdana, sans-serif;
-    }
-    h3 {
-        color: #ff4b4b;
-        font-family: 'Segoe UI', Tahoma, Geneva, Verdana, sans-serif;
-    }
+    h2 { color: #ff4b4b; margin-top: 30px; font-family: 'Segoe UI', Tahoma, Geneva, Verdana, sans-serif; }
+    h3 { color: #ff4b4b; font-family: 'Segoe UI', Tahoma, Geneva, Verdana, sans-serif; }
+
     /* Text styling */
-    .stMarkdown p {
-        font-size: 1.1rem;
-    }
+    .stMarkdown p { font-size: 1.1rem; }
+
     /* Footer styling */
-    footer {
-        visibility: hidden;
-    }
+    footer { visibility: hidden; }
+
     /* Hide sidebar on small screens */
     @media only screen and (max-width: 600px) {
-        [data-testid="stSidebar"] {
-            display: none;
-        }
-        .main .block-container {
-            padding-left: 1rem;
-            padding-right: 1rem;
-        }
-        .title h1 {
-            font-size: 2rem;
-        }
-        .stButton>button {
-            width: 100%;
-        }
+        [data-testid="stSidebar"] { display: none; }
+        .main .block-container { padding-left: 1rem; padding-right: 1rem; }
+        .title h1 { font-size: 2rem; }
+        .stButton>button { width: 100%; }
     }
+
     /* Sample images grid */
     .sample-images {
         display: flex;
@@ -128,8 +113,7 @@ def local_css():
         border: 2px solid #ff4b4b;
     }
     </style>
-    """,
-    unsafe_allow_html=True
+    """, unsafe_allow_html=True
     )
 
 local_css()
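Note: the function edited above follows Streamlit's usual CSS-injection pattern: build a <style> block as one string and pass it to st.markdown with unsafe_allow_html=True. A minimal sketch of that pattern (the helper name and the single rule are illustrative, not the app's full stylesheet):

import streamlit as st

def inject_css(css: str) -> None:
    # Streamlit renders the raw <style> tag because unsafe_allow_html is enabled.
    st.markdown(f"<style>{css}</style>", unsafe_allow_html=True)

# Illustrative usage; the real local_css() embeds the whole stylesheet shown in the diff.
inject_css(".stButton>button { background-color: #ff4b4b; color: white; }")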
@@ -143,19 +127,14 @@ client = InferenceClient(api_key=API_KEY)
 # Load the image classification pipeline
 @st.cache_resource
 def load_image_classification_pipeline():
-    """
-    Load the image classification pipeline using a pretrained model.
-    """
+    """ Load the image classification pipeline using a pretrained model. """
     return pipeline("image-classification", model="Shresthadev403/food-image-classification")
 
 pipe_classification = load_image_classification_pipeline()
 
 # Function to generate ingredients using Hugging Face Inference Client
 def get_ingredients_qwen(food_name):
-    """
-    Generate a list of ingredients for the given food item using Qwen NLP model.
-    Returns a clean, comma-separated list of ingredients.
-    """
+    """ Generate a list of ingredients for the given food item using Qwen NLP model. Returns a clean, comma-separated list of ingredients. """
    messages = [
        {
            "role": "user",
@@ -165,9 +144,7 @@ def get_ingredients_qwen(food_name):
     ]
     try:
         completion = client.chat.completions.create(
-            model="Qwen/Qwen2.5-Coder-32B-Instruct",
-            messages=messages,
-            max_tokens=50
+            model="Qwen/Qwen2.5-Coder-32B-Instruct", messages=messages, max_tokens=50
         )
         generated_text = completion.choices[0].message["content"].strip()
         return generated_text
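Note: load_image_classification_pipeline is wrapped in @st.cache_resource, so the transformers pipeline is built once per server process and reused across Streamlit reruns. A rough sketch of what that pipeline returns, which is why the app later reads predictions[0]['label'] ("example.jpg" is a placeholder path, not a file from the repo):

from transformers import pipeline
from PIL import Image

# Image-classification pipelines return a ranked list of dicts,
# e.g. [{'label': 'pizza', 'score': 0.97}, {'label': 'lasagna', 'score': 0.01}, ...]
clf = pipeline("image-classification", model="Shresthadev403/food-image-classification")
predictions = clf(Image.open("example.jpg"))  # placeholder image
top_food = predictions[0]["label"]            # the app then passes this to get_ingredients_qwen()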
@@ -198,7 +175,6 @@ sample_images = {
     "Sushi": "sample_images/sushi.jpg",
     "Salad": "sample_images/salad.jpg"
 }
-
 cols = st.columns(len(sample_images))
 for idx, (name, file_path) in enumerate(sample_images.items()):
     with cols[idx]:
@@ -208,14 +184,11 @@ for idx, (name, file_path) in enumerate(sample_images.items()):
 # File uploader
 st.subheader("Upload a food image:")
 uploaded_file = st.file_uploader("", type=["jpg", "png", "jpeg"])
-
 if 'uploaded_file' in locals() and uploaded_file is not None:
     # Display the uploaded image
-    if isinstance(uploaded_file, str):
-        # Sample image selected
+    if isinstance(uploaded_file, str): # Sample image selected
         image = Image.open(uploaded_file)
-    else:
-        # User uploaded image
+    else: # User uploaded image
         image = Image.open(uploaded_file)
     st.image(image, caption="Uploaded Image", use_container_width=True)
 
@@ -224,11 +197,10 @@ if 'uploaded_file' in locals() and uploaded_file is not None:
     with st.spinner("Classifying..."):
         # Make predictions
         predictions = pipe_classification(image)
-
         # Display only the top prediction
         top_food = predictions[0]['label']
         st.header(f"🍽️ Food: {top_food}")
-
+
         # Generate and display ingredients for the top prediction
         st.subheader("📝 Ingredients")
         try:
@@ -241,12 +213,10 @@ if 'uploaded_file' in locals() and uploaded_file is not None:
         try:
             client_gradio = Client("https://8a56cb969da1f9d721.gradio.live/")
             result = client_gradio.predict(
-                query=f"What's a healthy {top_food} recipe, and why is it healthy?",
-                api_name="/get_response"
+                query=f"What's a healthy {top_food} recipe, and why is it healthy?", api_name="/get_response"
             )
             st.write(result)
         except Exception as e:
             st.error(f"Unable to contact RAG: {e}")
 else:
     st.info("Please select or upload an image to get started.")
-
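Note: the recipe lookup calls a remote Gradio app through gradio_client against a *.gradio.live share URL; share links like this are temporary, so the hard-coded address will stop resolving once the hosting session ends. A hedged sketch of the call pattern, assuming the endpoint still exposes /get_response with a query parameter (the query text is illustrative):

from gradio_client import Client

rag = Client("https://8a56cb969da1f9d721.gradio.live/")  # temporary share link from the diff
answer = rag.predict(
    query="What's a healthy Pizza recipe, and why is it healthy?",  # illustrative query
    api_name="/get_response",
)
print(answer)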