{
  "_name_or_path": "2ndSetAI/clip-from-unclip",
  "architectures": [
    "CLIPModel"
  ],
  "initializer_factor": 1.0,
  "logit_scale_init_value": 2.6592,
  "model_type": "clip",
  "projection_dim": 512,
  "text_config": {
    "_name_or_path": "/root/.cache/huggingface/hub/models--stabilityai--stable-diffusion-2-1-unclip/snapshots/e99f66a92bdcd1b0fb0d4b6a9b81b3b37d8bea44/text_encoder",
    "architectures": [
      "CLIPTextModel"
    ],
    "bos_token_id": 0,
    "dropout": 0.0,
    "eos_token_id": 2,
    "hidden_act": "gelu",
    "hidden_size": 1024,
    "intermediate_size": 4096,
    "model_type": "clip_text_model",
    "num_attention_heads": 16,
    "num_hidden_layers": 23,
    "torch_dtype": "float16"
  },
  "torch_dtype": "float32",
  "transformers_version": "4.41.1",
  "vision_config": {
    "_name_or_path": "/root/.cache/huggingface/hub/models--stabilityai--stable-diffusion-2-1-unclip/snapshots/e99f66a92bdcd1b0fb0d4b6a9b81b3b37d8bea44/image_encoder",
    "architectures": [
      "CLIPVisionModelWithProjection"
    ],
    "dropout": 0.0,
    "hidden_act": "gelu",
    "hidden_size": 1280,
    "intermediate_size": 5120,
    "model_type": "clip_vision_model",
    "num_attention_heads": 16,
    "num_hidden_layers": 32,
    "patch_size": 14,
    "projection_dim": 1024,
    "torch_dtype": "float16"
  }
}
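For reference, a minimal sketch of loading a checkpoint with this config via transformers, assuming the repository named in `_name_or_path` (`2ndSetAI/clip-from-unclip`) is available on the Hub and ships a matching processor; the image path is a placeholder:

```python
import torch
from PIL import Image
from transformers import CLIPModel, CLIPProcessor

# Repo id taken from "_name_or_path" above; Hub availability is an assumption.
repo_id = "2ndSetAI/clip-from-unclip"

model = CLIPModel.from_pretrained(repo_id, torch_dtype=torch.float32)  # matches top-level "torch_dtype"
processor = CLIPProcessor.from_pretrained(repo_id)  # assumes a processor is bundled with the checkpoint

image = Image.open("example.jpg")  # hypothetical local image
inputs = processor(text=["a photo of a cat"], images=image, return_tensors="pt", padding=True)

with torch.no_grad():
    outputs = model(**inputs)

# Both modalities are projected into the shared 512-dim space set by "projection_dim".
print(outputs.text_embeds.shape)   # torch.Size([1, 512])
print(outputs.image_embeds.shape)  # torch.Size([1, 512])
```

Note that `vision_config.projection_dim` (1024) only applies when the image tower is loaded standalone as `CLIPVisionModelWithProjection`; inside `CLIPModel`, the top-level `projection_dim` (512) governs both projections.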