BrewLA committed · Commit 3d750bf
Parent(s): 1b0b385

new structure

Files changed:
- main.py +18 -12
- requirements.txt +2 -1
main.py
CHANGED
@@ -1,23 +1,23 @@
-from fastapi import FastAPI
+from fastapi import FastAPI, HTTPException
 from fastapi.middleware.cors import CORSMiddleware
 from transformers import pipeline
-import
+from pydantic import BaseModel
 import random
 
 app = FastAPI()
 
+# Configure CORS
 origins = [
     "https://bible-affirmations.vercel.app",  # Replace with your Vercel frontend URL
     "http://localhost:3000",  # For local testing
 ]
 
-# Allow all origins for CORS (adjust as per your security requirements)
 app.add_middleware(
     CORSMiddleware,
-    allow_origins=
+    allow_origins=origins,
     allow_credentials=True,
-    allow_methods=["POST"],
-    allow_headers=["*"],
+    allow_methods=["POST"],
+    allow_headers=["*"],
 )
 
 # Initialize the sentiment analysis pipeline
@@ -35,6 +35,15 @@ bible_verses = {
 ]
 }
 
+class FeelingRequest(BaseModel):
+    feeling: str
+
+class AffirmationResponse(BaseModel):
+    label: str
+    score: float
+    verse: str
+    response_message: str
+
 def get_feeling_classification(feeling_text):
     # Use the sentiment analysis pipeline to classify the feeling
     result = sentiment_analyzer(feeling_text)[0]
@@ -64,12 +73,9 @@ def generate_response(emotion_label):
 async def home():
     return {"message": "Hello World"}
 
-@app.post("/api/home")
-async def home_post(
-    label, score = get_feeling_classification(feeling)
+@app.post("/api/home", response_model=AffirmationResponse)
+async def home_post(feeling_request: FeelingRequest):
+    label, score = get_feeling_classification(feeling_request.feeling)
     bible_verse = get_bible_verse(label)
     response_message = generate_response(label)
     return {"label": label, "score": score, "verse": bible_verse, "response_message": response_message}
-
-if __name__ == "__main__":
-    uvicorn.run(app, host="0.0.0.0", port=8080)
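For reference, a minimal client sketch of how the reworked endpoint could be called after this commit. The request body follows the new FeelingRequest model and the JSON response mirrors AffirmationResponse; the base URL, port, and use of the requests library are illustrative assumptions, not part of the commit.

```python
# Hypothetical client for the updated POST /api/home endpoint.
# The URL below is an assumption (the Space's real URL is not shown in this commit).
import requests

resp = requests.post(
    "http://localhost:8080/api/home",
    json={"feeling": "I feel anxious about tomorrow"},  # matches FeelingRequest
)
resp.raise_for_status()
data = resp.json()  # AffirmationResponse: label, score, verse, response_message
print(data["label"], data["score"])
print(data["verse"])
print(data["response_message"])
```

Since the `if __name__ == "__main__"` launcher was dropped from main.py, the server would presumably be started externally, for example with `uvicorn main:app --host 0.0.0.0 --port 8080` (reusing the host and port from the removed block).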
requirements.txt
CHANGED
@@ -3,5 +3,6 @@ fastapi[all]
 uvicorn
 transformers
 tf-keras
+tensorflow
 torch
-
+pydantic