yym68686 committed on
Commit
cff82fa
·
1 Parent(s): b3ed199

Fix optional parameters being automatically included in the upstream payload even when the caller never set them (change defaults from False to None).

Browse files
Files changed (1) hide show
  1. main.py +11 -11
main.py CHANGED
@@ -1,4 +1,5 @@
1
  import os
 
2
  import httpx
3
  import yaml
4
  from contextlib import asynccontextmanager
@@ -61,10 +62,10 @@ class Message(BaseModel):
61
class RequestModel(BaseModel):
    """Inbound chat-completion request.

    Optional flags default to ``None`` rather than ``False`` so that
    "caller did not set this parameter" is distinguishable from
    "caller explicitly set it to False": the request processor only
    copies a field into the upstream payload when it is not None, so a
    ``False`` default would be forwarded on every request.
    """
    model: str
    messages: List[Message]
    logprobs: Optional[bool] = None
    top_logprobs: Optional[int] = None
    stream: Optional[bool] = None
    include_usage: Optional[bool] = None
68
 
69
  async def fetch_response_stream(client, url, headers, payload):
70
  async with client.stream('POST', url, headers=headers, json=payload) as response:
@@ -72,15 +73,7 @@ async def fetch_response_stream(client, url, headers, payload):
72
  yield chunk
73
 
74
async def fetch_response(client, url, headers, payload):
    """POST ``payload`` to ``url`` (non-streaming) and return the parsed JSON body.

    Args:
        client: an httpx.AsyncClient (or compatible object with an async ``post``).
        url: upstream endpoint to call.
        headers: HTTP headers forwarded verbatim.
        payload: JSON-serializable request body.

    Returns:
        The response body decoded from JSON.
    """
    # Commented-out request-dump debug code removed; log at the call site instead.
    response = await client.post(url, headers=headers, json=payload)
    return response.json()
85
 
86
  async def process_request(request: RequestModel, provider: Dict):
@@ -106,6 +99,7 @@ async def process_request(request: RequestModel, provider: Dict):
106
  }
107
 
108
  # 只有当相应参数存在且不为None时,才添加到payload中
 
109
  if request.stream is not None:
110
  payload["stream"] = request.stream
111
  if request.include_usage is not None:
@@ -119,6 +113,12 @@ async def process_request(request: RequestModel, provider: Dict):
119
  if request.top_logprobs is not None:
120
  payload["top_logprobs"] = request.top_logprobs
121
 
 
 
 
 
 
 
122
  if request.stream:
123
  return StreamingResponse(fetch_response_stream(app.state.client, url, headers, payload), media_type="text/event-stream")
124
  else:
 
1
  import os
2
+ import json
3
  import httpx
4
  import yaml
5
  from contextlib import asynccontextmanager
 
62
class RequestModel(BaseModel):
    """Inbound chat-completion request payload.

    Optional flags default to None — not False — so the proxy can tell
    "not provided by the caller" apart from "explicitly set to False";
    downstream code only forwards a field when it is not None.
    """
    model: str
    messages: List[Message]
    logprobs: Optional[bool] = None
    top_logprobs: Optional[int] = None
    stream: Optional[bool] = None
    include_usage: Optional[bool] = None
69
 
70
  async def fetch_response_stream(client, url, headers, payload):
71
  async with client.stream('POST', url, headers=headers, json=payload) as response:
 
73
  yield chunk
74
 
75
async def fetch_response(client, url, headers, payload):
    """Issue a single non-streaming POST and hand back the decoded JSON reply.

    ``client`` is an httpx.AsyncClient (or compatible); ``headers`` and
    ``payload`` are passed through to the upstream endpoint unchanged.
    """
    reply = await client.post(url, headers=headers, json=payload)
    return reply.json()
78
 
79
  async def process_request(request: RequestModel, provider: Dict):
 
99
  }
100
 
101
  # 只有当相应参数存在且不为None时,才添加到payload中
102
+ print("request: ", request)
103
  if request.stream is not None:
104
  payload["stream"] = request.stream
105
  if request.include_usage is not None:
 
113
  if request.top_logprobs is not None:
114
  payload["top_logprobs"] = request.top_logprobs
115
 
116
+ # request_info = {
117
+ # "url": url,
118
+ # "headers": headers,
119
+ # "payload": payload
120
+ # }
121
+ # print(f"Request details: {json.dumps(request_info, indent=2, ensure_ascii=False)}")
122
  if request.stream:
123
  return StreamingResponse(fetch_response_stream(app.state.client, url, headers, payload), media_type="text/event-stream")
124
  else: