% NOTE(review): removed page-extraction artifacts (running-header text
% "Spaces: / Running / Running") that preceded the bibliography.
\begin{thebibliography}{10}
\providecommand{\natexlab}[1]{#1}
\providecommand{\url}[1]{\texttt{#1}}
\expandafter\ifx\csname urlstyle\endcsname\relax
  \providecommand{\doi}[1]{doi: #1}\else
  \providecommand{\doi}{doi: \begingroup \urlstyle{rm}\Url}\fi
\bibitem[Pathak and Dufour(2023)]{2303.15533}
Arkanath Pathak and Nicholas Dufour.
\newblock Sequential training of GANs against GAN-classifiers reveals
  correlated ``knowledge gaps'' present among independently trained GAN
  instances.
\newblock \emph{arXiv preprint arXiv:2303.15533}, 2023.
\newblock URL \url{http://arxiv.org/abs/2303.15533v1}.
\bibitem[Kim et~al.(2022)Kim, Indurti, Park, and Sung]{2212.14149}
Chanwoo Kim, Sathish Indurti, Jinhwan Park, and Wonyong Sung.
\newblock Macro-block dropout for improved regularization in training
  end-to-end speech recognition models.
\newblock \emph{arXiv preprint arXiv:2212.14149}, 2022.
\newblock URL \url{http://arxiv.org/abs/2212.14149v1}.
\bibitem[Lei et~al.(2018)Lei, Chen, and Zhao]{1805.08355}
Dian Lei, Xiaoxiao Chen, and Jianfei Zhao.
\newblock Opening the black box of deep learning.
\newblock \emph{arXiv preprint arXiv:1805.08355}, 2018.
\newblock URL \url{http://arxiv.org/abs/1805.08355v1}.
\bibitem[Ham et~al.(2020)Ham, Jun, and Kim]{2002.02112}
Hyungrok Ham, Tae~Joon Jun, and Daeyoung Kim.
\newblock Unbalanced GANs: Pre-training the generator of generative adversarial
  network using variational autoencoder.
\newblock \emph{arXiv preprint arXiv:2002.02112}, 2020.
\newblock URL \url{http://arxiv.org/abs/2002.02112v1}.
\bibitem[Xie et~al.(2020)Xie, Ma, Lei, Zhang, Xue, Tan, and Guo]{2010.05244}
Jiyang Xie, Zhanyu Ma, Jianjun Lei, Guoqiang Zhang, Jing-Hao Xue, Zheng-Hua
  Tan, and Jun Guo.
\newblock Advanced dropout: A model-free methodology for Bayesian dropout
  optimization.
\newblock \emph{arXiv preprint arXiv:2010.05244}, 2020.
\newblock URL \url{http://arxiv.org/abs/2010.05244v2}.
\bibitem[Lee et~al.(2018)Lee, Kim, Yoon, Lee, Yang, and Hwang]{1805.10896}
Juho Lee, Saehoon Kim, Jaehong Yoon, Hae~Beom Lee, Eunho Yang, and Sung~Ju
  Hwang.
\newblock Adaptive network sparsification with dependent variational
  beta-Bernoulli dropout.
\newblock \emph{arXiv preprint arXiv:1805.10896}, 2018.
\newblock URL \url{http://arxiv.org/abs/1805.10896v3}.
\bibitem[Zhou et~al.(2020)Zhou, Ge, Xu, Wei, and Zhou]{2004.13342}
Wangchunshu Zhou, Tao Ge, Ke Xu, Furu Wei, and Ming Zhou.
\newblock Scheduled DropHead: A regularization method for transformer models.
\newblock \emph{arXiv preprint arXiv:2004.13342}, 2020.
\newblock URL \url{http://arxiv.org/abs/2004.13342v2}.
\bibitem[Weng(2019)]{1904.08994}
Lilian Weng.
\newblock From GAN to WGAN.
\newblock \emph{arXiv preprint arXiv:1904.08994}, 2019.
\newblock URL \url{http://arxiv.org/abs/1904.08994v1}.
\bibitem[Shen et~al.(2019)Shen, Tian, Liu, Xu, and Tao]{1911.12675}
Xu Shen, Xinmei Tian, Tongliang Liu, Fang Xu, and Dacheng Tao.
\newblock Continuous dropout.
\newblock \emph{arXiv preprint arXiv:1911.12675}, 2019.
\newblock URL \url{http://arxiv.org/abs/1911.12675v1}.
\bibitem[Zhang et~al.(2021)Zhang, Li, Bao, Harimoto, Wu, and Sun]{2108.08976}
Zhiyuan Zhang, Wei Li, Ruihan Bao, Keiko Harimoto, Yunfang Wu, and Xu Sun.
\newblock ASAT: Adaptively scaled adversarial training in time series.
\newblock \emph{arXiv preprint arXiv:2108.08976}, 2021.
\newblock URL \url{http://arxiv.org/abs/2108.08976v2}.
\end{thebibliography}