@inproceedings{Li-acl-23,
  author    = {Li, Qian and Joty, Shafiq and Wang, Daling and Feng, Shi and Zhang, Yifei and Qin, Chengwei},
  title     = {Contrastive Learning with Generated Representations for Inductive Knowledge Graph Embedding},
  booktitle = {Findings of the 61st Annual Meeting of the Association for Computational Linguistics},
  series    = {ACL'23 Findings},
  publisher = {Association for Computational Linguistics},
  address   = {Toronto, Canada},
  year      = {2023},
  abstract  = {With the evolution of Knowledge Graphs (KGs), new entities emerge which are not seen before. Representation learning of KGs in such an inductive setting aims to capture and transfer the structural patterns from existing entities to new entities. However, the performance of existing methods in inductive KGs are limited by sparsity and implicit transfer. In this paper, we propose VMCL, a Contrastive Learning (CL) framework with graph guided Variational autoencoder on Meta-KGs in the inductive setting. We first propose representation generation to capture the encoded and generated representations of entities, where the generated variations can densify representations with complementary features. Then, we design two CL objectives that work across entities and meta-KGs to simulate the transfer mode. With extensive experiments we demonstrate that our proposed VMCL can significantly outperform previous state-of-the-art baselines.},
}