@article{palagummi2023swin,
	  title     = {Shifted Window Based Self-Attention via Swin Transformer for Zero-Shot Learning},
  author    = {Yasaswi Palagummi and Sareh Rowlands},
  abstract  = {Generalised Zero-Shot Learning (GZSL) is a more demanding variant of zero-shot learning in which test samples may come from either the seen or the unseen classes. Because GZSL methods learn a recognition model for both seen and unseen classes using data samples from the seen classes only, they typically exhibit a bias towards the seen classes. This frequently leads to data from the unseen classes being misclassified into the seen classes, making GZSL more challenging. In this work, we propose an approach that leverages the shifted window based self-attention of the Swin Transformer (Swin-GZSL) in the inductive GZSL setting. We run experiments on three popular benchmark datasets used for ZSL and its variants: CUB, SUN, and AWA2. The results show that our Swin Transformer based model achieves a state-of-the-art harmonic mean on two datasets (AWA2 and SUN) and a near-state-of-the-art harmonic mean on the third (CUB). More importantly, the shifted window attention has linear computational complexity with respect to image size, which reduces training time significantly. We also observe less bias towards the seen classes than in most existing GZSL models.},
  journal   = {International Journal of Computer and Information Engineering},
  volume    = {17},
  number    = {10},
  year      = {2023},
  pages     = {524--531},
  ee        = {https://publications.waset.org/pdf/10013271},
  url       = {https://publications.waset.org/vol/202},
  bibsource = {https://publications.waset.org/},
  issn      = {1307-6892},
  publisher = {World Academy of Science, Engineering and Technology},
  index     = {Open Science Index 202, 2023},
	}
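
For readers unfamiliar with the harmonic mean reported in the abstract: in GZSL evaluation it is conventionally computed from the per-class average accuracies on the seen classes (S) and the unseen classes (U), so a model cannot score well by favouring the seen classes alone. A minimal Python sketch of this standard metric follows; the function and variable names are illustrative, not taken from the paper.

import sys

def gzsl_harmonic_mean(acc_seen: float, acc_unseen: float) -> float:
    """Harmonic mean H of per-class average accuracies on the
    seen (S) and unseen (U) classes: H = 2 * S * U / (S + U)."""
    if acc_seen + acc_unseen == 0:
        return 0.0
    return 2 * acc_seen * acc_unseen / (acc_seen + acc_unseen)

if __name__ == "__main__":
    # A seen-class-biased model scores poorly on H despite
    # a high seen-class accuracy.
    print(gzsl_harmonic_mean(0.90, 0.10))  # 0.18
    # A more balanced model scores higher.
    print(gzsl_harmonic_mean(0.60, 0.55))  # ~0.574
    sys.exit(0)

Because the harmonic mean collapses towards the smaller of the two accuracies, it directly penalises the seen-class bias the abstract describes, which is why it is the headline GZSL metric rather than overall accuracy.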