root-sajjan commited on
Commit
4e269c9
·
verified ·
1 Parent(s): 9cdce5c
Files changed (1) hide show
  1. llm/inference.py +38 -38
llm/inference.py CHANGED
@@ -98,45 +98,45 @@ def extract_info(text):
98
  print(output)
99
 
100
 
101
-
102
def get_name(url, object):
    """Identify the pictured object via the vision LLM and parse its info.

    Sends one multimodal chat message (prompt text + image URL) to the
    Llama-3.2-11B-Vision-Instruct model and runs the reply through
    ``extract_product_info``.

    Parameters
    ----------
    url : str
        Publicly reachable image URL forwarded to the model.
    object : str
        Object-category hint interpolated into the prompt (e.g. "fridge").
        NOTE(review): shadows the ``object`` builtin; name kept so existing
        keyword callers keep working.

    Returns
    -------
    Whatever ``extract_product_info`` returns for the model's
    comma-separated answer (brand/price info — defined elsewhere in file).
    """
    messages = [
        {
            "role": "user",
            "content": [
                {
                    "type": "text",
                    "text": f"Is this a {object}?. Can you guess what it is and give me the closest brand it resembles to? or a model number? And give me its average price in today's market in USD. In output, give me its normal name, model name, model number and price. separated by commas. No description is needed."
                },
                {
                    "type": "image_url",
                    "image_url": {"url": url}
                }
            ]
        }
    ]

    completion = client.chat.completions.create(
        model="meta-llama/Llama-3.2-11B-Vision-Instruct",
        messages=messages,
        max_tokens=500
    )

    # BUG FIX: modern openai / huggingface_hub clients return the message as
    # an attribute-style object, so message['content'] raises TypeError.
    # Support both dict-style and attribute-style responses.
    message = completion.choices[0].message
    llm_result = message["content"] if isinstance(message, dict) else message.content

    print('\n\nNow output of LLM:\n')
    print(llm_result)
    print('\n\nThat is the output')

    result = extract_product_info(llm_result)
    print(f'\n\nResult brand and price:{result}')

    return result
 
140
 
141
  # url = "https://i.ibb.co/mNYvqDL/crop_39.jpg"
142
  # object="fridge"
 
98
  print(output)
99
 
100
 
 
101
def get_name(url, object):
    """Identify the pictured object via the vision LLM and parse its info.

    Builds a single multimodal chat message (prompt text + image URL),
    queries the Llama-3.2-11B-Vision-Instruct model, and hands the reply to
    ``extract_product_info``.

    Parameters
    ----------
    url : str
        Publicly reachable image URL forwarded to the model.
    object : str
        Object-category hint interpolated into the prompt (e.g. "fridge").
        NOTE(review): shadows the ``object`` builtin; name kept so existing
        keyword callers keep working.

    Returns
    -------
    Whatever ``extract_product_info`` returns for the model's
    comma-separated answer (brand/price info — defined elsewhere in file).
    """
    messages = [
        {
            "role": "user",
            "content": [
                {
                    "type": "text",
                    "text": f"Is this a {object}?. Can you guess what it is and give me the closest brand it resembles to? or a model number? And give me its average price in today's market in USD. In output, give me its normal name, model name, model number and price. separated by commas. No description is needed."
                },
                {
                    "type": "image_url",
                    "image_url": {"url": url}
                }
            ]
        }
    ]

    completion = client.chat.completions.create(
        model="meta-llama/Llama-3.2-11B-Vision-Instruct",
        messages=messages,
        max_tokens=500
    )

    # BUG FIX: modern openai / huggingface_hub clients return the message as
    # an attribute-style object, so message['content'] raises TypeError.
    # Support both dict-style and attribute-style responses.
    message = completion.choices[0].message
    llm_result = message["content"] if isinstance(message, dict) else message.content

    print('\n\nNow output of LLM:\n')
    print(llm_result)

    print("Extracting from the output now, function calling")
    result = extract_product_info(llm_result)
    print(f'\n\nResult brand and price:{result}')

    return result
140
 
141
  # url = "https://i.ibb.co/mNYvqDL/crop_39.jpg"
142
  # object="fridge"