Fix the issue where openrouter cannot use Gemini.
- json_str/Vertex/text.json +30 -0
- main.py +6 -4
- request.py +66 -1
- response.py +3 -0
json_str/Vertex/text.json
ADDED
@@ -0,0 +1,30 @@
+  "contents": [
+    {
+      "role": string,
+      "parts": [
+        {
+          // Union field data can be only one of the following:
+          "text": string,
+          "inlineData": {
+            "mimeType": string,
+            "data": string
+          },
+          "fileData": {
+            "mimeType": string,
+            "fileUri": string
+          },
+          // End of list of possible types for union field data.
+          "videoMetadata": {
+            "startOffset": {
+              "seconds": integer,
+              "nanos": integer
+            },
+            "endOffset": {
+              "seconds": integer,
+              "nanos": integer
+            }
+          }
+        }
+      ]
+    }
+  ],
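The new json_str/Vertex/text.json is a reference template of the Vertex AI Gemini request-body schema (the "contents" array with its union "data" field), kept alongside the code for documentation. Purely as an illustration, and not part of this commit, a concrete text-only body that fits the template could look like the sketch below; the role and text values are made-up placeholders.

# Illustrative sketch only (not in this commit): a concrete Gemini request body
# matching the "contents" template above. All values are placeholders.
gemini_body = {
    "contents": [
        {
            "role": "user",
            "parts": [
                # Union field "data": exactly one of text / inlineData / fileData.
                {"text": "Describe this image."},
                # {"inlineData": {"mimeType": "image/png", "data": "<base64 bytes>"}},
            ],
        }
    ],
}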
main.py
CHANGED
@@ -87,9 +87,11 @@ async def process_request(request: RequestModel, provider: Dict):
         engine = "gemini"
     elif parsed_url.netloc == 'api.anthropic.com' or parsed_url.path.endswith("v1/message"):
         engine = "claude"
+    elif parsed_url.netloc == 'openrouter.ai':
+        engine = "openrouter"
     else:
         engine = "gpt"
-    print(engine)
+    print("engine", engine)
 
     url, headers, payload = await get_payload(request, engine, provider)
 
@@ -281,10 +283,10 @@ def generate_api_key():
     api_key = "sk-" + secrets.token_urlsafe(32)
     return {"api_key": api_key}
 
-async def on_fetch(request, env):
-    import asgi
+# async def on_fetch(request, env):
+#     import asgi
 
-    return await asgi.fetch(app, request, env)
+#     return await asgi.fetch(app, request, env)
 
 if __name__ == '__main__':
     import uvicorn
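The engine is picked from the host of the provider's base_url. As a standalone illustration of the new branch (the URL below is an assumed example, not taken from this diff):

from urllib.parse import urlparse

# Assumed example base_url; any URL whose host is openrouter.ai now selects the
# "openrouter" engine instead of falling through to the default "gpt" branch.
parsed_url = urlparse("https://openrouter.ai/api/v1/chat/completions")
if parsed_url.netloc == 'openrouter.ai':
    engine = "openrouter"
else:
    engine = "gpt"
print("engine", engine)  # prints: engine openrouter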
request.py
CHANGED
@@ -28,7 +28,7 @@ async def get_image_message(base64_image, engine = None):
     raise ValueError("Unknown engine")
 
 async def get_text_message(role, message, engine = None):
-    if "gpt" == engine or "claude" == engine:
+    if "gpt" == engine or "claude" == engine or "openrouter" == engine:
         return {"type": "text", "text": message}
     if "gemini" == engine:
         return {"text": message}
@@ -151,6 +151,69 @@ async def get_gpt_payload(request, engine, provider):
 
     return url, headers, payload
 
+async def get_openrouter_payload(request, engine, provider):
+    headers = {
+        'Authorization': f"Bearer {provider['api']}",
+        'Content-Type': 'application/json'
+    }
+    url = provider['base_url']
+
+    messages = []
+    for msg in request.messages:
+        if isinstance(msg.content, list):
+            content = []
+            for item in msg.content:
+                if item.type == "text":
+                    text_message = await get_text_message(msg.role, item.text, engine)
+                    content.append(text_message)
+                elif item.type == "image_url":
+                    image_message = await get_image_message(item.image_url.url, engine)
+                    content.append(image_message)
+        else:
+            content = msg.content
+        name = msg.name
+        if name:
+            messages.append({"role": msg.role, "name": name, "content": content})
+        else:
+            # print("content", content)
+            if isinstance(content, list):
+                for item in content:
+                    if item["type"] == "text":
+                        messages.append({"role": msg.role, "content": item["text"]})
+                    elif item["type"] == "image_url":
+                        messages.append({"role": msg.role, "content": item["url"]})
+            else:
+                messages.append({"role": msg.role, "content": content})
+
+    model = provider['model'][request.model]
+    payload = {
+        "model": model,
+        "messages": messages,
+    }
+
+    miss_fields = [
+        'model',
+        'messages',
+        'tools',
+        'tool_choice',
+        'temperature',
+        'top_p',
+        'max_tokens',
+        'presence_penalty',
+        'frequency_penalty',
+        'n',
+        'user',
+        'include_usage',
+        'logprobs',
+        'top_logprobs'
+    ]
+
+    for field, value in request.model_dump(exclude_unset=True).items():
+        if field not in miss_fields and value is not None:
+            payload[field] = value
+
+    return url, headers, payload
+
 async def gpt2claude_tools_json(json_dict):
     import copy
     json_dict = copy.deepcopy(json_dict)
@@ -289,5 +352,7 @@ async def get_payload(request: RequestModel, engine, provider):
         return await get_claude_payload(request, engine, provider)
     elif engine == "gpt":
         return await get_gpt_payload(request, engine, provider)
+    elif engine == "openrouter":
+        return await get_openrouter_payload(request, engine, provider)
     else:
         raise ValueError("Unknown payload")
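Since OpenRouter exposes an OpenAI-compatible chat-completions API, get_openrouter_payload builds a plain {model, messages} body, maps the requested model name through the provider's model table, and flattens list-style content back into simple string messages. As a rough illustration (the provider values and model mapping below are assumed examples, not part of the diff), the returned pieces look like this:

# Assumed example provider entry; keys mirror how the function reads them.
provider = {
    "api": "sk-or-...",  # OpenRouter API key
    "base_url": "https://openrouter.ai/api/v1/chat/completions",
    "model": {"gemini-1.5-pro": "google/gemini-pro-1.5"},  # request name -> OpenRouter id
}

# For a single text message, get_openrouter_payload would return roughly:
url = provider["base_url"]
headers = {
    "Authorization": f"Bearer {provider['api']}",
    "Content-Type": "application/json",
}
payload = {
    "model": "google/gemini-pro-1.5",
    "messages": [{"role": "user", "content": "Hello"}],
    # plus any request fields outside miss_fields, e.g. "stream": True
}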
response.py
CHANGED
@@ -154,6 +154,9 @@ async def fetch_response_stream(client, url, headers, payload, engine, model):
         elif engine == "gpt":
             async for chunk in fetch_gpt_response_stream(client, url, headers, payload):
                 yield chunk
+        elif engine == "openrouter":
+            async for chunk in fetch_gpt_response_stream(client, url, headers, payload):
+                yield chunk
         else:
             raise ValueError("Unknown response")
         break
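OpenRouter streams OpenAI-style SSE chunks, so the new branch reuses the existing fetch_gpt_response_stream rather than adding a separate parser. A minimal consumption sketch, assuming the client passed in is an httpx.AsyncClient and that url, headers, and payload come from get_openrouter_payload:

import httpx
from response import fetch_response_stream  # assumed flat module layout

async def stream_openrouter(url, headers, payload, model):
    # Sketch only: drive the dispatcher above with the "openrouter" engine and
    # print each streamed chunk as it arrives.
    async with httpx.AsyncClient() as client:
        async for chunk in fetch_response_stream(client, url, headers, payload, "openrouter", model):
            print(chunk, end="")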