Add model
- open_clip_config.json +1 -2
- open_clip_model.safetensors +1 -1
- open_clip_pytorch_model.bin +1 -1
open_clip_config.json
CHANGED
@@ -12,8 +12,7 @@
       "vocab_size": 49408,
       "width": 768,
       "heads": 12,
-      "layers": 12,
-      "hf_tokenizer_name": "JianLiao/CLIP-ViT-L-14-spectrum-icons-20k"
+      "layers": 12
     }
   },
   "preprocess_cfg": {
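For context, this config block describes the text tower (width 768, 12 heads, 12 layers, vocab 49408) that open_clip reads when the checkpoint is pulled from the Hub. Below is a minimal loading sketch, not part of the commit itself; it assumes the repo id appearing in the removed "hf_tokenizer_name" line (JianLiao/CLIP-ViT-L-14-spectrum-icons-20k) is the hosting repository and uses the public open_clip hf-hub loading API.

# Minimal sketch: load a Hub-hosted open_clip checkpoint whose text config
# matches the diff above. The repo id is taken from the removed
# "hf_tokenizer_name" entry and is an assumption about where this commit lives.
import torch
import open_clip

repo = "hf-hub:JianLiao/CLIP-ViT-L-14-spectrum-icons-20k"
model, _, preprocess = open_clip.create_model_and_transforms(repo)
tokenizer = open_clip.get_tokenizer(repo)

text = tokenizer(["an icon of a calendar", "an icon of a camera"])
with torch.no_grad():
    text_features = model.encode_text(text)
    text_features /= text_features.norm(dim=-1, keepdim=True)
print(text_features.shape)  # ViT-L/14 text embeddings, e.g. torch.Size([2, 768])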
open_clip_model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:3fe7ffe6fd973050de9a5b35abfbd749eabe4414bbc8dde4d9784afed0ab698b
 size 1710517724
open_clip_pytorch_model.bin
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:9a7c25c71751445939b01477be89f1ec944159010cfdcf0af80a437f60f67d02
 size 1710639510
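Both weight files are tracked as Git LFS pointers (spec v1): the committed pointer only records the sha256 oid and byte size of the real file. A minimal verification sketch follows, assuming the actual weight files have been downloaded next to the script (file paths are illustrative; the oid/size pairs come from the diffs above).

# Minimal sketch: check downloaded weight files against the LFS pointer values
# committed above. File paths are assumptions; oids and sizes come from the diff.
import hashlib
import os

EXPECTED = {
    "open_clip_model.safetensors": (
        "3fe7ffe6fd973050de9a5b35abfbd749eabe4414bbc8dde4d9784afed0ab698b",
        1710517724,
    ),
    "open_clip_pytorch_model.bin": (
        "9a7c25c71751445939b01477be89f1ec944159010cfdcf0af80a437f60f67d02",
        1710639510,
    ),
}

for name, (oid, size) in EXPECTED.items():
    if not os.path.exists(name):
        print(f"{name}: not downloaded, skipping")
        continue
    digest = hashlib.sha256()
    with open(name, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)
    ok = digest.hexdigest() == oid and os.path.getsize(name) == size
    print(f"{name}: {'OK' if ok else 'MISMATCH'}")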