\begin{tabular}{ll}
\toprule
Inspection technique & Citations \\
\midrule
Analysis of weights & \cite{Perez-Benitez2018, Yoon2018, Langkvist2018, Deiss2018, Lawhern2018, Xu2016, Tsinalis2016a, Nurse2016, Tabar2016a, Zheng2015, Stober2015, Manor2015, Yang2015a, Langkvist2012, Cecotti2011} \\
Analysis of activations & \cite{Yuan2018a, Waytowich2018, Lawhern2018, kwak2017, Yin2017a, Supratak2017, Shamwell2016, Manor2015} \\
Input-perturbation network-prediction correlation maps & \cite{Schirrmeister2017a, Volker2018, Hartmann2018b, Behncke2017, Schirrmeister2017} \\
Generating input to maximize activation & \cite{VanPutten2018b, Ruffini2018a, Sors2018, Bashivan2016a} \\
Occlusion of input & \cite{Lee2018, Chambon2018, Thodoroff2016} \\
Analysis of most-activating input windows & \cite{Hartmann2018b} \\
Retrieval of closest examples & \cite{Deiss2018} \\
Input-feature unit-output correlation maps & \cite{Schirrmeister2017} \\
Analysis of generated outputs & \cite{Hartmann2018} \\
Analysis of performance with transferred layers & \cite{Hajinoroozi2017} \\
Saliency maps & \cite{Vilamala2017} \\
Class activation maps & \cite{Ghosh2018} \\
Ablation of filters & \cite{Lawhern2018} \\
DeepLIFT & \cite{Lawhern2018} \\
\bottomrule
\end{tabular}
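
As an illustration of one of the techniques listed above, the following sketch computes a simple gradient-based saliency map for a trained EEG classifier. It is a minimal, hypothetical example, not code from any of the cited works: the \texttt{model} interface (a PyTorch module mapping a \texttt{(batch, channels, time)} tensor to class logits), the tensor shapes, and the function name are assumptions made only for illustration.

\begin{verbatim}
# Illustrative sketch: gradient-based saliency map for an EEG classifier.
# "model" is assumed to be any PyTorch module taking a (1, C, T) tensor
# of one EEG window and returning class logits; names are hypothetical.
import torch

def saliency_map(model, eeg_window, target_class):
    """Return |d logit_target / d input| for a single EEG window."""
    model.eval()
    x = eeg_window.clone().detach().requires_grad_(True)  # (1, C, T)
    logits = model(x)
    logits[0, target_class].backward()
    # The absolute gradient per channel and time point indicates which
    # parts of the input most influence the target-class score.
    return x.grad.abs().squeeze(0)  # (C, T)
\end{verbatim}

The resulting map can be averaged over correctly classified trials of one class to visualize which channels and time points the network relies on, in the same spirit as the saliency-map and input-perturbation analyses cited in the table.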