@article{joty-durrani-sajjad-abdelali-csl-17,
  abstract  = {We explore neural joint models for the task of domain adaptation in machine translation in two ways: (i) we apply state-of-the-art domain adaptation techniques, such as mixture modelling and data selection using the recently proposed Neural Network Joint Model (NNJM) (Devlin et al., 2014); (ii) we propose two novel approaches to perform adaptation through instance weighting and weight readjustment in the NNJM framework. In our first approach, we propose a pair of models called Neural Domain Adaptation Models (NDAM) that minimizes the cross entropy by regularizing the loss function with respect to in-domain (and optionally to out-domain) model. In the second approach, we present a set of Neural Fusion Models (NFM) that combines the in- and the out-domain models by readjusting their parameters based on the in-domain data.
We evaluated our models on the standard task of translating English-to-German and Arabic-to-English TED talks. The NDAM models achieved better perplexities and modest BLEU improvements compared to the baseline NNJM, trained either on in-domain or on a concatenation of in- and out-domain data. On the other hand, the NFM models obtained significant improvements of up to +0.9 and +0.7 BLEU points, respectively. We also demonstrate improvements over existing adaptation methods such as instance weighting, phrasetable fill-up, linear and log-linear interpolations.},
  author    = {Joty, Shafiq and Durrani, Nadir and Sajjad, Hassan and Abdelali, Ahmed},
  doi       = {10.1016/j.csl.2016.12.006},
  issn      = {0885-2308},
  journal   = {Computer Speech \& Language},
  note      = {Special Issue on Deep Learning for Machine Translation},
  pages     = {161--179},
  publisher = {Elsevier},
  title     = {Domain Adaptation Using Neural Network Joint Model},
  url       = {http://www.sciencedirect.com/science/article/pii/S0885230816301474},
  volume    = {45},
  year      = {2017},
}