Spaces: HassanDataSci committed "Update app.py"

app.py CHANGED
@@ -1,12 +1,10 @@
 import streamlit as st
-from transformers import pipeline
+from transformers import pipeline, AutoTokenizer, AutoModelForCausalLM
 from PIL import Image
-import google.generativeai as palm
 import os
 
-#
-os.environ["
-palm.configure(api_key=os.environ["GOOGLE_API_KEY"])
+# Hugging Face token login (add this as a secret in Hugging Face Spaces)
+os.environ["HF_TOKEN"] = st.secrets["HF_TOKEN"]
 
 # Load the image classification pipeline
 @st.cache_resource
@@ -18,18 +16,27 @@ def load_image_classification_pipeline():
 
 pipe_classification = load_image_classification_pipeline()
 
-#
-
+# Load the Llama model for ingredient generation
+@st.cache_resource
+def load_llama_pipeline():
+    """
+    Load the Llama model for ingredient generation.
+    """
+    tokenizer = AutoTokenizer.from_pretrained("meta-llama/Llama-3.2-3B-Instruct", use_auth_token=os.environ["HF_TOKEN"])
+    model = AutoModelForCausalLM.from_pretrained("meta-llama/Llama-3.2-3B-Instruct", use_auth_token=os.environ["HF_TOKEN"])
+    return pipeline("text-generation", model=model, tokenizer=tokenizer)
+
+pipe_llama = load_llama_pipeline()
+
+# Function to generate ingredients using the Llama model
+def get_ingredients_llama(food_name):
     """
-    Generate a list of ingredients for the given food item using
+    Generate a list of ingredients for the given food item using the Llama model.
     """
     prompt = f"List the main ingredients typically used to prepare {food_name}."
     try:
-        response =
-
-            return response["candidates"][0]["output"]
-        else:
-            return "No ingredients found."
+        response = pipe_llama(prompt, max_length=50, num_return_sequences=1)
+        return response[0]["generated_text"].strip()
     except Exception as e:
         return f"Error generating ingredients: {e}"
 
@@ -42,7 +49,7 @@ st.image("IR_IMAGE.png", caption="Food Recognition Model", use_column_width=True
 # Sidebar for model information
 st.sidebar.title("Model Information")
 st.sidebar.write("**Image Classification Model**: Shresthadev403/food-image-classification")
-st.sidebar.write("**LLM for Ingredients**:
+st.sidebar.write("**LLM for Ingredients**: meta-llama/Llama-3.2-3B-Instruct")
 
 # Upload image
 uploaded_file = st.file_uploader("Choose a food image...", type=["jpg", "png", "jpeg"])
@@ -63,7 +70,7 @@ if uploaded_file is not None:
     # Generate and display ingredients for the top prediction
     st.subheader("Ingredients")
    try:
-        ingredients =
+        ingredients = get_ingredients_llama(top_food)
        st.write(ingredients)
    except Exception as e:
        st.error(f"Error generating ingredients: {e}")
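For reference, below is a minimal sketch of the ingredient-generation path introduced in this commit, run outside Streamlit; it can help verify gated-model access before redeploying the Space. The model ID and prompt wording are taken from the diff above, while the HF_TOKEN environment variable, the token= argument (the newer spelling of use_auth_token=), the max_new_tokens setting, and the example food name are assumptions for illustration only.

# Minimal local check of the ingredient-generation path added in this commit,
# run outside Streamlit. Assumes the HF_TOKEN environment variable holds a
# Hugging Face access token with permission to download the gated
# meta-llama/Llama-3.2-3B-Instruct checkpoint; recent transformers releases
# accept token= in place of the use_auth_token= argument used in app.py.
import os

from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline

MODEL_ID = "meta-llama/Llama-3.2-3B-Instruct"
hf_token = os.environ["HF_TOKEN"]

tokenizer = AutoTokenizer.from_pretrained(MODEL_ID, token=hf_token)
model = AutoModelForCausalLM.from_pretrained(MODEL_ID, token=hf_token)
generator = pipeline("text-generation", model=model, tokenizer=tokenizer)

food_name = "margherita pizza"  # example input standing in for the classifier's top label
prompt = f"List the main ingredients typically used to prepare {food_name}."

# max_new_tokens bounds only the generated continuation, unlike max_length,
# which also counts the prompt tokens.
result = generator(prompt, max_new_tokens=80, num_return_sequences=1)
print(result[0]["generated_text"].strip())

Bounding generation with max_new_tokens sidesteps a pitfall of the max_length=50 used in the committed code: max_length includes the prompt tokens, so a longer prompt can leave little or no room for the generated ingredient list.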