hsuvaskakoty committed
Upload folder using huggingface_hub
- bert-base-uncased_classification_report_with_val.csv +7 -0
- bert-base-uncased_confusion_matrix_with_val.png +0 -0
- bert-base-uncased_predictions_with_val.csv +0 -0
- bert-base-uncased_wikidata_prop_label_removed_metrics.json +1 -0
- events.out.tfevents.1731602545.hsuvaspc.359515.0 +3 -0
- events.out.tfevents.1731602738.hsuvaspc.359515.1 +3 -0
- events.out.tfevents.1731684846.hsuvaspc.527949.0 +3 -0
- events.out.tfevents.1731685324.hsuvaspc.527949.1 +3 -0
- events.out.tfevents.1731685414.hsuvaspc.529421.0 +3 -0
- model.safetensors +1 -1
- roberta-base_classification_report_with_val.csv +7 -0
- roberta-base_confusion_matrix_with_val.png +0 -0
- roberta-base_predictions_with_val.csv +0 -0
- roberta-base_wikidata_prop_label_removed_metrics.json +1 -0
- tokenizer.json +16 -2
- training_args.bin +3 -0
- vocab.txt +0 -0
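
For reference, a minimal sketch of how a commit like this one ("Upload folder using huggingface_hub") can be produced; the local folder path and repo id below are placeholders, not values taken from this commit.

# Sketch: push a local results folder to the Hub in a single commit.
# folder_path and repo_id are hypothetical placeholders.
from huggingface_hub import HfApi

api = HfApi()
api.upload_folder(
    folder_path="./results",            # local directory containing the files listed above
    repo_id="hsuvaskakoty/some-repo",   # placeholder repo id
    commit_message="Upload folder using huggingface_hub",
)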
bert-base-uncased_classification_report_with_val.csv
ADDED
@@ -0,0 +1,7 @@
+,precision,recall,f1-score,support
+deleted,0.9,0.9230769230769231,0.9113924050632911,78.0
+keep,0.0,0.0,0.0,8.0
+no_consensus,0.375,0.5454545454545454,0.4444444444444444,11.0
+accuracy,0.8041237113402062,0.8041237113402062,0.8041237113402062,0.8041237113402062
+macro avg,0.425,0.48951048951048953,0.45194561650257853,97.0
+weighted avg,0.7662371134020619,0.8041237113402062,0.7832731596270679,97.0
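
A hedged sketch of how a per-class report CSV with this exact shape can be generated with scikit-learn and pandas; the y_true/y_pred lists are placeholder data, not the actual validation set. Note that transposing the output_dict report broadcasts the scalar accuracy across all four columns, which is why the accuracy row above repeats the same value.

# Sketch (not necessarily the author's script): write a classification-report CSV.
import pandas as pd
from sklearn.metrics import classification_report

y_true = ["deleted", "keep", "no_consensus", "deleted"]   # placeholder labels
y_pred = ["deleted", "deleted", "no_consensus", "deleted"]

report = classification_report(y_true, y_pred, output_dict=True, zero_division=0)
pd.DataFrame(report).transpose().to_csv(
    "bert-base-uncased_classification_report_with_val.csv"
)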
bert-base-uncased_confusion_matrix_with_val.png
ADDED
bert-base-uncased_predictions_with_val.csv
ADDED
The diff for this file is too large to render.
See raw diff
bert-base-uncased_wikidata_prop_label_removed_metrics.json
ADDED
@@ -0,0 +1 @@
+{"accuracy": 0.8041237113402062, "precision": 0.425, "recall": 0.48951048951048953, "f1": 0.45194561650257853}
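
The precision/recall/F1 values in this JSON match the macro-avg row of the report above, so they appear to be macro-averaged. A sketch of computing and dumping such a file, again with placeholder data:

# Sketch: accuracy plus macro-averaged precision/recall/F1, written to JSON.
import json
from sklearn.metrics import accuracy_score, precision_recall_fscore_support

y_true = ["deleted", "keep", "no_consensus", "deleted"]   # placeholder labels
y_pred = ["deleted", "deleted", "no_consensus", "deleted"]

precision, recall, f1, _ = precision_recall_fscore_support(
    y_true, y_pred, average="macro", zero_division=0
)
metrics = {
    "accuracy": accuracy_score(y_true, y_pred),
    "precision": precision,
    "recall": recall,
    "f1": f1,
}
with open("bert-base-uncased_wikidata_prop_label_removed_metrics.json", "w") as f:
    json.dump(metrics, f)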
events.out.tfevents.1731602545.hsuvaspc.359515.0
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:cd20851bb798561cca51a8692ba515cfbf9cac5fe6919a2cf23fb5da420cd832
+size 35733
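
The events.out.tfevents.* entries in this commit are TensorBoard logs; only their Git LFS pointers (sha256 and size) appear in the diff. A minimal sketch of inspecting one after downloading the actual file, assuming the tensorboard package is installed:

# Sketch: print all scalar series (loss, learning rate, eval metrics, ...) from an event file.
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

acc = EventAccumulator("events.out.tfevents.1731602545.hsuvaspc.359515.0")
acc.Reload()                              # parse the event file
for tag in acc.Tags()["scalars"]:         # available scalar tags
    for event in acc.Scalars(tag):
        print(tag, event.step, event.value)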
events.out.tfevents.1731602738.hsuvaspc.359515.1
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:de75e6189b438876c62372afc55729416484547ace1c9a817032cb0acb4aad6f
+size 1058
events.out.tfevents.1731684846.hsuvaspc.527949.0
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:fa17859119e409b25fb66e87ba482377f0b322f7fba7985e68f9a65ab27dad27
+size 31408
events.out.tfevents.1731685324.hsuvaspc.527949.1
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:77b1fa51bcf4bb68431ae9cc86bbb422c7474b0a550ab603a9e8fe1a111658e9
+size 1058
events.out.tfevents.1731685414.hsuvaspc.529421.0
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:38b91259a2c102498174cc6d9784721f31464027d44e3d90b092246a688983a7
+size 31418
model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:53901f6a00d863ce4faff50737d3ef8280f1174114c071e66df204957049138b
 size 1421499516
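
Only the LFS hash of model.safetensors changes here; the weights themselves live outside the diff. A hedged sketch of inspecting the checkpoint after downloading the real file:

# Sketch: list a few tensors from the downloaded safetensors checkpoint.
from safetensors.torch import load_file

state_dict = load_file("model.safetensors")
for name, tensor in list(state_dict.items())[:5]:
    print(name, tuple(tensor.shape), tensor.dtype)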
roberta-base_classification_report_with_val.csv
ADDED
@@ -0,0 +1,7 @@
+,precision,recall,f1-score,support
+deleted,0.8041237113402062,1.0,0.8914285714285715,78.0
+keep,0.0,0.0,0.0,8.0
+no_consensus,0.0,0.0,0.0,11.0
+accuracy,0.8041237113402062,0.8041237113402062,0.8041237113402062,0.8041237113402062
+macro avg,0.26804123711340205,0.3333333333333333,0.29714285714285715,97.0
+weighted avg,0.6466149431395473,0.8041237113402062,0.716818851251841,97.0
roberta-base_confusion_matrix_with_val.png
ADDED
roberta-base_predictions_with_val.csv
ADDED
The diff for this file is too large to render.
See raw diff
roberta-base_wikidata_prop_label_removed_metrics.json
ADDED
@@ -0,0 +1 @@
+{"accuracy": 0.8041237113402062, "precision": 0.26804123711340205, "recall": 0.3333333333333333, "f1": 0.29714285714285715}
tokenizer.json
CHANGED
@@ -1,7 +1,21 @@
 {
   "version": "1.0",
-  "truncation": null,
-  "padding": null,
+  "truncation": {
+    "direction": "Right",
+    "max_length": 512,
+    "strategy": "LongestFirst",
+    "stride": 0
+  },
+  "padding": {
+    "strategy": {
+      "Fixed": 512
+    },
+    "direction": "Right",
+    "pad_to_multiple_of": null,
+    "pad_id": 1,
+    "pad_type_id": 0,
+    "pad_token": "<pad>"
+  },
   "added_tokens": [
     {
       "id": 0,
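
The blocks added to tokenizer.json correspond to 512-token truncation and fixed-length padding. A sketch of producing an equivalent configuration with the tokenizers library; the call sequence is illustrative, not taken from this repo's training code.

# Sketch: enable truncation and fixed-length padding, which serialize into
# "truncation"/"padding" blocks like the ones added above.
from tokenizers import Tokenizer

tok = Tokenizer.from_file("tokenizer.json")
tok.enable_truncation(max_length=512)                        # "max_length": 512, "strategy": "LongestFirst"
tok.enable_padding(length=512, pad_id=1, pad_token="<pad>")  # "Fixed": 512, "pad_id": 1, "pad_token": "<pad>"
tok.save("tokenizer.json")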
training_args.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9625c270b235740bb9f1c15bf13cb6ec9887882821374da82b8921259675bafc
+size 5176
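
training_args.bin is the serialized TrainingArguments object that transformers' Trainer writes alongside a checkpoint. A hedged sketch of inspecting it after downloading the real file; the printed attribute names are standard TrainingArguments fields, not values read from this commit.

# Sketch: load the pickled TrainingArguments; transformers saves it with torch.save,
# so weights_only=False is needed on recent PyTorch versions.
import torch

args = torch.load("training_args.bin", weights_only=False)
print(args.num_train_epochs, args.learning_rate, args.per_device_train_batch_size)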
vocab.txt
ADDED
The diff for this file is too large to render.
See raw diff