begumcig committed on
Commit 2cc9a24 · verified · 1 Parent(s): 65d5dfd

Upload folder using huggingface_hub (#3)


- 795cd8dc0d5b583a2bb830fae183bc2da386270f387239dc372ab25a373bf172 (9076287ca01551cbaf73b9cfab1dee08ba4aced0)

Files changed (3)
  1. base_results.json +14 -14
  2. plots.png +0 -0
  3. smashed_results.json +14 -14
base_results.json CHANGED
@@ -2,18 +2,18 @@
  "current_gpu_type": "Tesla T4",
  "current_gpu_total_memory": 15095.0625,
  "perplexity": 3.4586403369903564,
- "memory_inference_first": 790.0,
- "memory_inference": 790.0,
- "token_generation_latency_sync": 37.40723571777344,
- "token_generation_latency_async": 37.517561949789524,
- "token_generation_throughput_sync": 0.026732795963452235,
- "token_generation_throughput_async": 0.026654184014897324,
- "token_generation_CO2_emissions": 1.942063140736845e-05,
- "token_generation_energy_consumption": 0.0018457401360101422,
- "inference_latency_sync": 122.10714988708496,
- "inference_latency_async": 47.79155254364014,
- "inference_throughput_sync": 0.008189528630589782,
- "inference_throughput_async": 0.020924199921876677,
- "inference_CO2_emissions": 1.9370730362422426e-05,
- "inference_energy_consumption": 6.823837478184117e-05
+ "memory_inference_first": 808.0,
+ "memory_inference": 808.0,
+ "token_generation_latency_sync": 36.71660461425781,
+ "token_generation_latency_async": 36.8287518620491,
+ "token_generation_throughput_sync": 0.027235633863913426,
+ "token_generation_throughput_async": 0.027152698623774684,
+ "token_generation_CO2_emissions": 1.9246105981369177e-05,
+ "token_generation_energy_consumption": 0.0018056219463504367,
+ "inference_latency_sync": 123.11312103271484,
+ "inference_latency_async": 47.777581214904785,
+ "inference_throughput_sync": 0.008122611071928475,
+ "inference_throughput_async": 0.020930318667702626,
+ "inference_CO2_emissions": 1.9401499854764428e-05,
+ "inference_energy_consumption": 6.645693546901914e-05
  }
plots.png CHANGED
smashed_results.json CHANGED
@@ -2,18 +2,18 @@
  "current_gpu_type": "Tesla T4",
  "current_gpu_total_memory": 15095.0625,
  "perplexity": 5.245799541473389,
- "memory_inference_first": 362.0,
- "memory_inference": 362.0,
- "token_generation_latency_sync": 95.34894561767578,
- "token_generation_latency_async": 95.61750814318657,
- "token_generation_throughput_sync": 0.010487792953786162,
- "token_generation_throughput_async": 0.010458335710887872,
- "token_generation_CO2_emissions": 2.0800614727756692e-05,
- "token_generation_energy_consumption": 0.004631853474529401,
- "inference_latency_sync": 146.7707092285156,
- "inference_latency_async": 94.17455196380615,
- "inference_throughput_sync": 0.006813348557463488,
- "inference_throughput_async": 0.010618579851426608,
- "inference_CO2_emissions": 2.1171634369010966e-05,
- "inference_energy_consumption": 7.173729223505599e-05
+ "memory_inference_first": 360.0,
+ "memory_inference": 360.0,
+ "token_generation_latency_sync": 95.35757217407226,
+ "token_generation_latency_async": 97.4069582298398,
+ "token_generation_throughput_sync": 0.010486844171897868,
+ "token_generation_throughput_async": 0.010266207036672031,
+ "token_generation_CO2_emissions": 2.0750754440553232e-05,
+ "token_generation_energy_consumption": 0.004657113739257539,
+ "inference_latency_sync": 146.8122100830078,
+ "inference_latency_async": 94.30098533630371,
+ "inference_throughput_sync": 0.006811422561070354,
+ "inference_throughput_async": 0.010604343066339339,
+ "inference_CO2_emissions": 2.085801266041447e-05,
+ "inference_energy_consumption": 7.147684133074615e-05
  }
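
For context, a minimal sketch of how the two result files touched by this commit could be compared after checkout. It is not part of the repository's tooling: it assumes plain Python with the standard json module, the filenames base_results.json and smashed_results.json as updated above, and metric keys exactly as they appear in the diffs; everything else is illustrative.

import json

# Load the two benchmark result files updated in this commit.
with open("base_results.json") as f:
    base = json.load(f)
with open("smashed_results.json") as f:
    smashed = json.load(f)

# Print each metric present in both files; for numeric values, also
# show the smashed/base ratio so the two runs can be compared per key.
for key in sorted(base.keys() & smashed.keys()):
    b, s = base[key], smashed[key]
    if isinstance(b, (int, float)) and isinstance(s, (int, float)) and b != 0:
        print(f"{key}: base={b:.6g} smashed={s:.6g} ratio={s / b:.3f}")
    else:
        print(f"{key}: base={b} smashed={s}")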