@article{rai2024hyperparameter,
  title     = {Experimental Study of Hyperparameter Tuning a Deep Learning Convolutional Recurrent Network for Text Classification},
  author    = {Bharatendra Rai},
  abstract  = {Sequences of words in text data have long-term dependencies and are known to suffer from the vanishing gradient problem when developing deep learning models. Although recurrent networks such as long short-term memory networks help overcome this problem, achieving high text classification performance remains challenging. Convolutional recurrent networks, which combine the advantages of long short-term memory networks and convolutional neural networks, can improve text classification performance. However, arriving at suitable hyperparameter values for a convolutional recurrent network is still a challenging task, as fitting the model requires significant computing resources. This paper illustrates the advantages of using convolutional recurrent networks for text classification with the help of statistically planned computer experiments for hyperparameter tuning.},
  journal   = {International Journal of Electronics and Communication Engineering},
  volume    = {18},
  number    = {4},
  year      = {2024},
  pages     = {85--89},
  ee        = {https://publications.waset.org/pdf/10013595},
  url       = {https://publications.waset.org/vol/208},
  bibsource = {https://publications.waset.org/},
  issn      = {1307-6892},
  publisher = {World Academy of Science, Engineering and Technology},
  index     = {Open Science Index 208, 2024},
}