@article{Kibria_Yousuf_2021,
  author   = {Kibria, Md. Raisul and Yousuf, Mohammad Abu},
  title    = {Context-driven {Bengali} Text Generation using Conditional Language Model},
  journal  = {Statistics, Optimization \& Information Computing},
  volume   = {9},
  number   = {2},
  pages    = {334--350},
  year     = {2021},
  month    = mar,
  doi      = {10.19139/soic-2310-5070-1061},
  url      = {http://iapress.org/index.php/soic/article/view/1061},
  abstract = {Text generation is a rapidly evolving field of Natural Language Processing (NLP) with larger Language models proposed very often setting new state-of-the-art. These models are exorbitantly effective in learning the representation of words and their internal coherence in a particular language. However, an established context-driven, end to end text generation model is very rare, even more so for the Bengali language. In this paper, we have proposed a Bidirectional gated recurrent unit (GRU) based architecture that simulates the conditional language model or the decoder portion of the sequence to sequence (seq2seq) model and is further conditioned upon the target context vectors. We have explored several ways of combining multiple context words into a fixed dimensional vector representation that is extracted from the same GloVe language model which is used to generate the embedding matrix. We have used beam search optimization to generate the sentence with the maximum cumulative log probability score. In addition, we have proposed a human scoring based evaluation metric and used it to compare the performance of the model with unidirectional LSTM and GRU networks. Empirical results prove that the proposed model performs exceedingly well in producing meaningful outcomes depicting the target context. The experiment leads to an architecture that can be applied to an extensive domain of context-driven text generation based applications and which is also a key contribution to the NLP based literature of the Bengali language.},
}