\begin{thebibliography}{10}

\bibitem{Kaggle}
Kaggle diabetic retinopathy detection dataset.
\newblock \url{https://www.kaggle.com/c/diabetic-retinopathy-detection/data}.

\bibitem{DBLP:journals/corr/abs-1803-08375}
Abien~Fred Agarap.
\newblock Deep learning using rectified linear units ({ReLU}).
\newblock {\em CoRR}, abs/1803.08375, 2018.

\bibitem{Clevert2016FastAA}
Djork-Arn{\'e} Clevert, Thomas Unterthiner, and Sepp Hochreiter.
\newblock Fast and accurate deep network learning by exponential linear units
  ({ELUs}).
\newblock In {\em International Conference on Learning Representations (ICLR)},
  2016.

\bibitem{dubowski2020activation}
Adam Dubowski.
\newblock Activation function impact on sparse neural networks.
\newblock {\em CoRR}, abs/2010.05943, 2020.

\bibitem{DBLP:journals/corr/KlambauerUMH17}
G{\"{u}}nter Klambauer, Thomas Unterthiner, Andreas Mayr, and Sepp Hochreiter.
\newblock Self-normalizing neural networks.
\newblock {\em CoRR}, abs/1706.02515, 2017.

\bibitem{krizhevsky2009learning}
Alex Krizhevsky et~al.
\newblock Learning multiple layers of features from tiny images.
\newblock Technical report, University of Toronto, 2009.

\bibitem{DBLP:journals/corr/abs-1908-08681}
Diganta Misra.
\newblock Mish: {A} self regularized non-monotonic neural activation function.
\newblock {\em CoRR}, abs/1908.08681, 2019.

\bibitem{noel2021growing}
Mathew~Mithra Noel, Arunkumar L, Advait Trivedi, and Praneet Dutta.
\newblock Growing cosine unit: A novel oscillatory activation function that can
  speedup training and reduce parameters in convolutional neural networks,
  2021.

\bibitem{noel2021biologically}
Mathew~Mithra Noel, Shubham Bharadwaj, Venkataraman Muthiah-Nakarajan, Praneet
  Dutta, and Geraldine~Bessie Amali.
\newblock Biologically inspired oscillating activation functions can bridge the
  performance gap between biological and artificial neurons, 2021.

\bibitem{DBLP:journals/corr/abs-1710-05941}
Prajit Ramachandran, Barret Zoph, and Quoc~V. Le.
\newblock Searching for activation functions.
\newblock {\em CoRR}, abs/1710.05941, 2017.

\end{thebibliography}
