TerminatorPower committed
Update README.md
README.md CHANGED
@@ -28,52 +28,7 @@ Eval train split: 0.2/0.8
 - **Model type:** [Classifier]
 - **Language(s) (NLP):** [Turkish]
 - **License:** [MIT License]
-- **Finetuned from model [optional]:** [bert-base-multilingual-uncased
-
-
-## How to Get Started with the Model
-
-Use the code below to get started with the model.
-
-import torch
-from transformers import AutoTokenizer, AutoModelForSequenceClassification
-
-model_name = "TerminatorPower/bert-news-classif-turkish"
-tokenizer = AutoTokenizer.from_pretrained(model_name)
-model = AutoModelForSequenceClassification.from_pretrained(model_name)
-model.eval()
-
-reverse_label_mapping = {
-    0: "label_0",
-    1: "label_1",
-    2: "label_2",
-    3: "label_3",
-    4: "label_4",
-    5: "label_5",
-    6: "label_6",
-    7: "label_7",
-    8: "label_8",
-    9: "label_9",
-    10: "label_10",
-    11: "label_11",
-    12: "siyaset"  # Example: Map index 12 back to "siyaset"
-}
-
-def predict(text):
-    inputs = tokenizer(text, return_tensors="pt", truncation=True, padding="max_length", max_length=512)
-    inputs = {key: value.to("cuda" if torch.cuda.is_available() else "cpu") for key, value in inputs.items()}
-    model.to(inputs["input_ids"].device)
-    with torch.no_grad():
-        outputs = model(**inputs)
-    predictions = torch.argmax(outputs.logits, dim=1)
-    predicted_label = reverse_label_mapping[predictions.item()]
-    return predicted_label
-
-if __name__ == "__main__":
-    text = "Some example news text"
-    print(f"Predicted label: {predict(text)}")
-
-
+- **Finetuned from model [optional]:** [bert-base-multilingual-uncased]
 ## Training Details
 I used an RTX 3060 12 GB card to train; training took 245 minutes in total.
 
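For reference, here is a minimal inference sketch against the published checkpoint, based on the quick-start snippet removed above. The repository id and the 512-token truncation are taken from that snippet; since only index 12 ("siyaset", Turkish for "politics") is named in its label mapping, this sketch returns the raw class index rather than a label string.

```python
# Minimal inference sketch (assumptions: repo id and max_length taken from the removed
# quick-start snippet; class names other than index 12 are not documented, so the raw
# class index is returned instead of a label string).
import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification

model_name = "TerminatorPower/bert-news-classif-turkish"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForSequenceClassification.from_pretrained(model_name)
model.eval()

device = "cuda" if torch.cuda.is_available() else "cpu"
model.to(device)

def predict(text: str) -> int:
    """Return the predicted class index for one Turkish news text."""
    inputs = tokenizer(text, return_tensors="pt", truncation=True, max_length=512)
    inputs = {k: v.to(device) for k, v in inputs.items()}
    with torch.no_grad():
        logits = model(**inputs).logits
    return logits.argmax(dim=-1).item()

if __name__ == "__main__":
    # Index 12 corresponds to "siyaset" (politics) in the original mapping.
    print(predict("Örnek bir haber metni"))
```

Dropping `padding="max_length"` from the original snippet avoids padding a single example out to 512 tokens; truncation is kept so long articles still fit the model's input size.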