Update README.md
README.md
@@ -170,6 +170,6 @@ Our paper, detailing the efficient training methods for MoE models using Scale-U
 @article{AquilaMoE2024,
   title={AquilaMoE: Efficient Training for MoE Models with Scale-Up and Scale-Out Strategies},
   author={{Language Foundation Model \& Software Team, Beijing Academy of Artificial Intelligence (BAAI)}},
-  journal={arXiv preprint arXiv:
+  journal={arXiv preprint arXiv:arxiv.org/abs/2408.06567},
   year={2024}
 }