Ofir Zafrir, Ariel Larey, Guy Boudoukh, Haihao Shen, Moshe Wasserblat
@article{zafrir2021prune,
  title         = {Prune Once for All: Sparse Pre-Trained Language Models},
  author        = {Zafrir, Ofir and Larey, Ariel and Boudoukh, Guy and Shen, Haihao and Wasserblat, Moshe},
  journal       = {arXiv preprint arXiv:2111.05754},
  eprint        = {2111.05754},
  archiveprefix = {arXiv},
  year          = {2021},
}
Vithursan Thangarasa, Mahmoud Salem, Shreyas Saxena, Chen-Yu Leong, Joel Hestness, Sean Lie
Ning Ding, Xingtai Lv, Qiaosen Wang, Yulin Chen, Bowen Zhou, Zhiyuan Liu, Maosong Sun