本项目中所使用的微调代码可通过以下链接获取:
特别鸣谢 Llama-Factory 项目的开源贡献,该工作极大地简化了对100多种语言模型的微调流程,为研究和应用 Llama
系列模型提供了强大的支持与便利!
@article{zheng2024llamafactory,
  title         = {{LlamaFactory}: Unified Efficient Fine-Tuning of 100+ Language Models},
  author        = {Zheng, Yaowei and Zhang, Richong and Zhang, Junhao and Ye, Yanhan and Luo, Zheyan and Ma, Yongqiang},
  journal       = {arXiv preprint arXiv:2403.13372},
  year          = {2024},
  eprint        = {2403.13372},
  archiveprefix = {arXiv},
  primaryclass  = {cs.CL},
  url           = {https://arxiv.org/abs/2403.13372},
}