Survey: Megatron-LM: Training Multi-Billion Parameter Language Models Using Model Parallelism

@article{shoeybi2019megatron,
  title={Megatron-LM: Training Multi-Billion Parameter Language Models Using Model Parallelism},
  author={Shoeybi, Mohammad and Patwary, Mostofa and Puri, Raul and LeGresley, Patrick and Casper, Jared and Catanzaro, Bryan},
  journal={arXiv preprint arXiv:1909.08053},
  year={2019}
}