	@article{AlDaoud2019Comparison,
	  title     = {Comparison between XGBoost, LightGBM and CatBoost Using a Home Credit Dataset},
	  author    = {Essam Al Daoud},
	  abstract  = {Gradient boosting methods have proven to be a very effective strategy, and many successful machine learning solutions have been developed using XGBoost and its derivatives. The aim of this study is to investigate and compare the efficiency of three gradient boosting methods. The Home Credit dataset, which contains 219 features and 356,251 records, is used in this work. In addition, new features are generated, and several techniques are used to rank and select the best features. The experiments indicate that LightGBM is faster and more accurate than CatBoost and XGBoost across varying numbers of features and records.},
	    journal   = {International Journal of Computer and Information Engineering},
	  volume    = {13},
	  number    = {1},
	  year      = {2019},
	  pages     = {6--10},
	  ee        = {https://publications.waset.org/pdf/10009954},
	  url   	= {https://publications.waset.org/vol/145},
	  bibsource = {https://publications.waset.org/},
	  issn  	= {1307-6892},
	  publisher = {World Academy of Science, Engineering and Technology},
	  index 	= {Open Science Index 145, 2019},
	}
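
Below is a minimal Python sketch of the kind of comparison the abstract describes: fitting, timing, and scoring XGBoost, LightGBM, and CatBoost on a synthetic binary-classification table standing in for the Home Credit data. The sample size, hyperparameter values, and AUC-based evaluation are illustrative assumptions, not the paper's exact pipeline.

# Sketch only: times and scores the three boosting libraries on a
# synthetic dataset; shapes and parameters are assumptions, not the
# paper's settings.
import time
from sklearn.datasets import make_classification
from sklearn.model_selection import train_test_split
from sklearn.metrics import roc_auc_score
from xgboost import XGBClassifier
from lightgbm import LGBMClassifier
from catboost import CatBoostClassifier

# Synthetic stand-in for the Home Credit table (real data: 219 features,
# 356,251 records).
X, y = make_classification(n_samples=50_000, n_features=50,
                           n_informative=20, random_state=0)
X_tr, X_te, y_tr, y_te = train_test_split(X, y, test_size=0.2,
                                          random_state=0)

models = {
    "XGBoost":  XGBClassifier(n_estimators=200, max_depth=6,
                              learning_rate=0.1),
    "LightGBM": LGBMClassifier(n_estimators=200, num_leaves=63,
                               learning_rate=0.1),
    "CatBoost": CatBoostClassifier(iterations=200, depth=6,
                                   learning_rate=0.1, verbose=0),
}

for name, model in models.items():
    start = time.perf_counter()
    model.fit(X_tr, y_tr)                    # train on the same split
    elapsed = time.perf_counter() - start
    auc = roc_auc_score(y_te, model.predict_proba(X_te)[:, 1])
    print(f"{name:9s}  train time {elapsed:6.1f}s  AUC {auc:.4f}")

With the same number of trees and comparable depth settings, a loop like this gives a rough side-by-side view of training time and accuracy; the paper's stronger claim rests on its own feature engineering and selection over the real dataset.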