@inproceedings{Yue-emnlp-21,
abstract = {Pre-trained models for Natural Languages (NL) like BERT and GPT have recently been shown to transfer well to Programming Languages (PL) and largely benefit a broad set of code-related tasks. Despite their success, most current methods either rely on an encoder-only (or decoder-only) pre-training that is suboptimal for generation (resp. understanding) tasks, or process a code snippet in the same way as NL, neglecting the special characteristics of PL such as token types. We present CodeT5, a unified pre-trained encoder-decoder Transformer model that better leverages the code semantics conveyed by developer-assigned identifiers. Our model is unified in that it builds on a single framework to seamlessly support both code understanding and generation tasks, and it employs a unified format of task control codes to allow for multi-task learning. We propose a novel identifier-aware pre-training objective that enables the model to distinguish which code tokens are identifiers and to recover them when they are masked. To further close the gap between pre-training and fine-tuning, we propose a bimodal dual generation task to encourage alignment between NL and PL. Comprehensive experiments show that CodeT5 significantly outperforms prior methods on understanding tasks such as code defect detection and clone detection, and on generation tasks across various directions including PL-NL, NL-PL, and PL-PL.
Further analysis reveals that our model can better capture semantic information from code.},
address = {Online and Punta Cana, Dominican Republic},
author = {Wang, Yue and Wang, Weishi and Joty, Shafiq and Hoi, Steven C.H.},
booktitle = {Proceedings of the 2021 Conference on Empirical Methods in Natural Language Processing},
publisher = {Association for Computational Linguistics},
series = {EMNLP'21},
title = {{CodeT5}: Identifier-aware Unified Pre-trained Encoder-Decoder Models for Code Understanding and Generation},
url = {https://aclanthology.org/2021.emnlp-main.685/},
year = {2021}
}