import httpx
from openai import (
    APIConnectionError,
    APIError,
    APIResponseValidationError,
    APIStatusError,
    APITimeoutError,
    AuthenticationError,
    BadRequestError,
    OpenAIError,
    RateLimitError,
)


class AuthenticationError(AuthenticationError):  # type: ignore
    def __init__(self, message, llm_provider, model, response: httpx.Response):
        self.status_code = 401
        self.message = message
        self.llm_provider = llm_provider
        self.model = model
        super().__init__(self.message, response=response, body=None)
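# Illustrative sketch (not part of this module's behavior): how caller code
# might raise the AuthenticationError wrapper above. The provider name, model,
# and the placeholder httpx.Response below are assumptions for the example.
#
#   _request = httpx.Request(method="POST", url="https://example.invalid/chat")
#   _response = httpx.Response(status_code=401, request=_request)
#   raise AuthenticationError(
#       message="Invalid API key",
#       llm_provider="example_provider",
#       model="example-model",
#       response=_response,
#   )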


class BadRequestError(BadRequestError):  # type: ignore
    def __init__(self, message, model, llm_provider, response: httpx.Response):
        self.status_code = 400
        self.message = message
        self.model = model
        self.llm_provider = llm_provider
        super().__init__(self.message, response=response, body=None)


class Timeout(APITimeoutError):
    def __init__(self, message, model, llm_provider):
        self.status_code = 408
        self.message = message
        self.model = model
        self.llm_provider = llm_provider
        # APITimeoutError requires an httpx.Request; no real request is
        # available here, so a placeholder is constructed.
        request = httpx.Request(method="POST", url="https://api.openai.com/v1")
        super().__init__(request=request)


class RateLimitError(RateLimitError):  # type: ignore
    def __init__(self, message, llm_provider, model, response: httpx.Response):
        self.status_code = 429
        self.message = message
        self.llm_provider = llm_provider
        self.model = model
        super().__init__(self.message, response=response, body=None)


class ContextWindowExceededError(BadRequestError):
    def __init__(self, message, model, llm_provider, response: httpx.Response):
        self.status_code = 400
        self.message = message
        self.model = model
        self.llm_provider = llm_provider
        super().__init__(
            message=self.message,
            model=self.model,
            llm_provider=self.llm_provider,
            response=response,
        )
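# Note (illustrative, not module behavior): ContextWindowExceededError
# subclasses the BadRequestError wrapper above, so handlers should list it
# before the more general BadRequestError. The handler bodies below are
# placeholder assumptions.
#
#   try:
#       ...  # provider call that raises these wrappers
#   except ContextWindowExceededError:
#       ...  # e.g. shorten the prompt and retry
#   except BadRequestError:
#       ...  # any other 400-level rejection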


class ServiceUnavailableError(APIStatusError):
    def __init__(self, message, llm_provider, model, response: httpx.Response):
        self.status_code = 503
        self.message = message
        self.llm_provider = llm_provider
        self.model = model
        super().__init__(self.message, response=response, body=None)


class APIError(APIError):  # type: ignore
    def __init__(self, status_code, message, llm_provider, model, request: httpx.Request):
        self.status_code = status_code
        self.message = message
        self.llm_provider = llm_provider
        self.model = model
        super().__init__(self.message, request=request, body=None)


class APIConnectionError(APIConnectionError):  # type: ignore
    def __init__(self, message, llm_provider, model, request: httpx.Request):
        self.message = message
        self.llm_provider = llm_provider
        self.model = model
        self.status_code = 500
        super().__init__(message=self.message, request=request)


class APIResponseValidationError(APIResponseValidationError):  # type: ignore
    def __init__(self, message, llm_provider, model):
        self.message = message
        self.llm_provider = llm_provider
        self.model = model
        # The SDK base class requires a response and body; no real response is
        # available here, so placeholder objects are constructed.
        request = httpx.Request(method="POST", url="https://api.openai.com/v1")
        response = httpx.Response(status_code=500, request=request)
        super().__init__(response=response, body=None, message=message)


class OpenAIError(OpenAIError):  # type: ignore
    # Wraps an exception raised by the OpenAI client. The attributes read from
    # original_exception (http_status, http_body, json_body, headers, code)
    # follow the legacy pre-1.0 openai error interface; they are stored on the
    # wrapper and only the message is passed to the base Exception.
    def __init__(self, original_exception):
        self.status_code = original_exception.http_status
        self.http_body = original_exception.http_body
        self.http_status = original_exception.http_status
        self.json_body = original_exception.json_body
        self.headers = original_exception.headers
        self.code = original_exception.code
        super().__init__(str(original_exception))
        self.llm_provider = "openai"


class BudgetExceededError(Exception):
    def __init__(self, current_cost, max_budget):
        self.current_cost = current_cost
        self.max_budget = max_budget
        message = f"Budget has been exceeded! Current cost: {current_cost}, Max budget: {max_budget}"
        super().__init__(message)
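# Illustrative sketch (assumption, not module behavior): a caller that tracks
# spend might raise BudgetExceededError once accumulated cost passes a
# configured ceiling. `current_cost` and `max_budget` are caller-side values.
#
#   if current_cost > max_budget:
#       raise BudgetExceededError(current_cost=current_cost, max_budget=max_budget)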


class InvalidRequestError(BadRequestError):
    def __init__(self, message, model, llm_provider):
        self.status_code = 400
        self.message = message
        self.model = model
        self.llm_provider = llm_provider
        # The BadRequestError wrapper above requires an httpx.Response; callers
        # of this class do not supply one, so a placeholder is constructed.
        request = httpx.Request(method="POST", url="https://api.openai.com/v1")
        response = httpx.Response(status_code=400, request=request)
        super().__init__(
            message=self.message,
            model=self.model,
            llm_provider=self.llm_provider,
            response=response,
        )
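# Illustrative sketch (assumptions, not module behavior): unlike the wrappers
# that accept an httpx.Response, APIError and APIConnectionError take an
# httpx.Request, and Timeout builds a placeholder request internally. The URL,
# provider, and model below are placeholders.
#
#   _request = httpx.Request(method="POST", url="https://example.invalid/chat")
#   raise APIError(
#       status_code=502,
#       message="Bad gateway from upstream provider",
#       llm_provider="example_provider",
#       model="example-model",
#       request=_request,
#   )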