yujiepan committed
Commit fa8f8db · verified · 1 parent: fde817e

Upload folder using huggingface_hub

Files changed (3):
  1. README.md +1 -37
  2. config.json +1 -1
  3. model.safetensors +2 -2
README.md CHANGED
@@ -13,41 +13,5 @@ Note the model is in float16.
 
  Codes:
  ```python
- import transformers
- import torch
- import os
- from huggingface_hub import create_repo, upload_folder
-
- source_model_id = 'stabilityai/stablelm-2-1_6b'
- save_path = '/tmp/yujiepan/stablelm-2-tiny-random'
- repo_id = 'yujiepan/stablelm-2-tiny-random'
-
- config = transformers.AutoConfig.from_pretrained(
-     source_model_id, trust_remote_code=True)
- config.hidden_size = 4
- config.intermediate_size = 6
- config.num_attention_heads = 4
- config.num_hidden_layers = 2
- config.num_key_value_heads = 2
- config.torch_dtype = torch.float16
-
- model = transformers.AutoModelForCausalLM.from_config(
-     config, trust_remote_code=True, torch_dtype=torch.float16)
- model = model.half()
- print(next(iter(model.parameters())).dtype)
- model.config.torch_dtype = torch.float16
-
- tokenizer = transformers.AutoTokenizer.from_pretrained(
-     source_model_id, trust_remote_code=True)
-
- result = transformers.pipelines.pipeline(
-     'text-generation',
-     model=model.float(), tokenizer=tokenizer)('Hello World!')
- print(result)
-
- model.save_pretrained(save_path)
- tokenizer.save_pretrained(save_path)
- os.system(f'ls -alh {save_path}')
- # create_repo(repo_id, exist_ok=True)
- # upload_folder(repo_id=repo_id, folder_path=save_path)
+ model.config
  ```
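For reference, a minimal sketch (not part of this commit) of how the uploaded checkpoint could be loaded and smoke-tested after the change, assuming the repo id from the removed script and a transformers version that supports `torch_dtype='auto'`:

```python
import transformers

repo_id = 'yujiepan/stablelm-2-tiny-random'

# Load the uploaded checkpoint in its stored dtype (float16 after this commit).
model = transformers.AutoModelForCausalLM.from_pretrained(
    repo_id, torch_dtype='auto', trust_remote_code=True)
tokenizer = transformers.AutoTokenizer.from_pretrained(
    repo_id, trust_remote_code=True)
print(next(iter(model.parameters())).dtype)  # expected: torch.float16

# Same smoke test as the removed script; cast to float32 for CPU generation.
result = transformers.pipelines.pipeline(
    'text-generation',
    model=model.float(), tokenizer=tokenizer)('Hello World!')
print(result)
```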
config.json CHANGED
@@ -21,7 +21,7 @@
  "rope_scaling": null,
  "rope_theta": 10000,
  "tie_word_embeddings": false,
- "torch_dtype": "float32",
+ "torch_dtype": "float16",
  "transformers_version": "4.39.0",
  "use_cache": true,
  "use_qkv_bias": true,
model.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:0b31730a65c0a5975358dce3ab7fa6e96aeff6acd6bdcdabf75dc60ada7644a8
- size 3215696
+ oid sha256:00257f3077b772910186736a9f5760978bbcfacca5e20ceb6ad6af885294be80
+ size 1609472
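The size change is consistent with the dtype change above; a rough sanity check using the sizes from this diff (ignoring the small safetensors JSON header):

```python
# float32 stores 4 bytes per parameter, float16 stores 2,
# so the checkpoint should shrink by roughly half.
old_size, new_size = 3_215_696, 1_609_472
print(new_size / old_size)  # ~0.50
```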