Update app.py
app.py CHANGED
@@ -4,16 +4,13 @@ from transformers import AutoTokenizer, AutoModelForCausalLM
 import torch
 from fastapi.middleware.cors import CORSMiddleware
 import logging
-import os
 from huggingface_hub import HfFolder
 
 # Configure logging
 logging.basicConfig(level=logging.INFO)
 logger = logging.getLogger(__name__)
 
-
-if not hf_token:
-    raise ValueError("Hugging Face API token is not set. Please set the HF_API_TOKEN environment variable.")
+
 
 app = FastAPI()
 
@@ -30,8 +27,8 @@ app.add_middleware(
 MODEL_NAME = "aubmindlab/aragpt2-medium"
 
 try:
-    tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME
-    model = AutoModelForCausalLM.from_pretrained(MODEL_NAME
+    tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
+    model = AutoModelForCausalLM.from_pretrained(MODEL_NAME)
 except Exception as e:
     logger.error(f"Failed to load model or tokenizer: {str(e)}")
     raise RuntimeError(f"Failed to load model or tokenizer: {str(e)}")
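For context: the removed guard raised when no Hugging Face token was present, but in the lines shown it referenced hf_token without that variable being defined, and this commit drops the check entirely along with the unused import os. If the guard were to be reinstated, a minimal sketch could look like the following, assuming the token is supplied via the HF_API_TOKEN environment variable named in the original error message; the fallback to the huggingface_hub token cache is illustrative and not part of this commit.

import os
import logging

from huggingface_hub import HfFolder

logger = logging.getLogger(__name__)

# Illustrative only: read the token from the HF_API_TOKEN environment variable
# mentioned in the original error message, falling back to any token already
# cached by huggingface_hub on this machine.
hf_token = os.getenv("HF_API_TOKEN") or HfFolder.get_token()

if not hf_token:
    raise ValueError(
        "Hugging Face API token is not set. "
        "Please set the HF_API_TOKEN environment variable."
    )

logger.info("Hugging Face token found; proceeding with model load.")

The rest of the change simply closes the missing parentheses on the two from_pretrained calls so that aubmindlab/aragpt2-medium loads inside the existing try/except block.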