# Alternative approach (kept for reference): call the hosted Hugging Face
# Inference API directly over HTTP.
# import requests
# import os
#
# # Full Inference API endpoint for the model
# API_URL = "https://api-inference.huggingface.co/models/sayyedAhmed/Crisis_Severity_Predictor_LSTM"
#
# # Load your Hugging Face API token (store it in the environment, or replace with the actual key)
# API_KEY = os.getenv("HF_API_KEY")
#
# headers = {
#     "Authorization": f"Bearer {API_KEY}",
#     "Content-Type": "application/json",
# }
# payload = {
#     "inputs": "Your test input here"  # Replace this with the actual input for your model
# }
#
# # Make the POST request to the Hugging Face Inference API
# response = requests.post(API_URL, headers=headers, json=payload)
# response.raise_for_status()  # Surface HTTP errors before parsing the body
#
# # Print the response (the predictions)
# print(response.json())
from transformers import pipeline

# Specify the model you want to use
model_name = "sayyedAhmed/Crisis_Severity_Predictor_LSTM"
# Create the pipeline with an explicit framework ("pt" selects the PyTorch backend)
classifier = pipeline("text-classification", model=model_name, framework="pt")
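# Optional, under the assumption that a CUDA-capable GPU is available:
# pipelines accept a device argument to run inference on it, e.g.
# classifier = pipeline("text-classification", model=model_name, framework="pt", device=0)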
# Use the pipeline to run inference
result = classifier("Example text for classification.")
print(result)
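
# A minimal batch-inference sketch. Assumptions (not from the original script):
# the model defines multiple severity labels, and the installed transformers
# version supports the top_k parameter (top_k=None returns scores for every
# label instead of only the highest-scoring one).
texts = [
    "First example crisis report.",
    "Second example crisis report.",
]
all_scores = classifier(texts, top_k=None)
for text, scores in zip(texts, all_scores):
    print(text)
    for entry in scores:
        print(f"  {entry['label']}: {entry['score']:.3f}")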