\documentclass{article} % For LaTeX2e
\usepackage{iclr2024_conference,times}

\usepackage[utf8]{inputenc} % allow utf-8 input
\usepackage[T1]{fontenc}    % use 8-bit T1 fonts
\usepackage{hyperref}       % hyperlinks
\usepackage{url}            % simple URL typesetting
\usepackage{booktabs}       % professional-quality tables
\usepackage{amsfonts}       % blackboard math symbols
\usepackage{nicefrac}       % compact symbols for 1/2, etc.
\usepackage{microtype}      % microtypography
\usepackage{titletoc}

\usepackage{subcaption}
\usepackage{graphicx}
\usepackage{amsmath}
\usepackage{multirow}
\usepackage{color}
\usepackage{colortbl}
\usepackage{cleveref}
\usepackage{algorithm}
\usepackage{algorithmicx}
\usepackage{algpseudocode}

\DeclareMathOperator*{\argmin}{arg\,min}
\DeclareMathOperator*{\argmax}{arg\,max}

\graphicspath{{../}} % To reference your generated figures, see below.
\begin{filecontents}{references.bib}
@article{lu2024aiscientist,
  title={The {AI} {S}cientist: Towards Fully Automated Open-Ended Scientific Discovery},
  author={Lu, Chris and Lu, Cong and Lange, Robert Tjarko and Foerster, Jakob and Clune, Jeff and Ha, David},
  journal={arXiv preprint arXiv:2408.06292},
  year={2024}
}

@book{goodfellow2016deep,
  title={Deep learning},
  author={Goodfellow, Ian and Bengio, Yoshua and Courville, Aaron},
  volume={1},
  year={2016},
  publisher={MIT Press}
}

@misc{batatia2023macehigherorderequivariant,
      title={MACE: Higher Order Equivariant Message Passing Neural Networks for Fast and Accurate Force Fields}, 
      author={Ilyes Batatia and Dávid Péter Kovács and Gregor N. C. Simm and Christoph Ortner and Gábor Csányi},
      year={2023},
      eprint={2206.07697},
      archivePrefix={arXiv},
      primaryClass={stat.ML},
      url={https://arxiv.org/abs/2206.07697}, 
}

@misc{satorras2022enequivariantgraphneural,
      title={E(n) Equivariant Graph Neural Networks}, 
      author={Victor Garcia Satorras and Emiel Hoogeboom and Max Welling},
      year={2022},
      eprint={2102.09844},
      archivePrefix={arXiv},
      primaryClass={cs.LG},
      url={https://arxiv.org/abs/2102.09844}, 
}

@inproceedings{NIPS2017_303ed4c6,
 author = {Sch\"{u}tt, Kristof and Kindermans, Pieter-Jan and Sauceda Felix, Huziel Enoc and Chmiela, Stefan and Tkatchenko, Alexandre and M\"{u}ller, Klaus-Robert},
 booktitle = {Advances in Neural Information Processing Systems},
 editor = {I. Guyon and U. Von Luxburg and S. Bengio and H. Wallach and R. Fergus and S. Vishwanathan and R. Garnett},
 pages = {},
 publisher = {Curran Associates, Inc.},
 title = {SchNet: A continuous-filter convolutional neural network for modeling quantum interactions},
 url = {https://proceedings.neurips.cc/paper_files/paper/2017/file/303ed4c69846ab36c2904d3ba8573050-Paper.pdf},
 volume = {30},
 year = {2017}
}

@article{WIEDER20201,
title = {A compact review of molecular property prediction with graph neural networks},
journal = {Drug Discovery Today: Technologies},
volume = {37},
pages = {1-12},
year = {2020},
issn = {1740-6749},
doi = {https://doi.org/10.1016/j.ddtec.2020.11.009},
url = {https://www.sciencedirect.com/science/article/pii/S1740674920300305},
author = {Oliver Wieder and Stefan Kohlbacher and Mélaine Kuenemann and Arthur Garon and Pierre Ducrot and Thomas Seidel and Thierry Langer},
keywords = {AI, Deep-learning, Neural-networks, Graph neural-networks, Molecular representation, Molecular property, Drug discovery, Computational chemistry},
abstract = {As graph neural networks are becoming more and more powerful and useful in the field of drug discovery, many pharmaceutical companies are getting interested in utilizing these methods for their own in-house frameworks. This is especially compelling for tasks such as the prediction of molecular properties which is often one of the most crucial tasks in computer-aided drug discovery workflows. The immense hype surrounding these kinds of algorithms has led to the development of many different types of promising architectures and in this review we try to structure this highly dynamic field of AI-research by collecting and classifying 80 GNNs that have been used to predict more than 20 molecular properties using 48 different datasets.}
}

\end{filecontents}

\title{TITLE HERE}

\author{GPT-4o \& Claude\\
Department of Computer Science\\
University of LLMs\\
}

\newcommand{\fix}{\marginpar{FIX}}
\newcommand{\new}{\marginpar{NEW}}

\begin{document}

\maketitle

\begin{abstract}
ABSTRACT HERE
\end{abstract}

\section{Introduction}
\label{sec:intro}
INTRO HERE

\section{Related Work}
\label{sec:related}
RELATED WORK HERE

\section{Background}
\label{sec:background}
BACKGROUND HERE

\section{Method}
\label{sec:method}
METHOD HERE

\section{Experimental Setup}
\label{sec:experimental}
EXPERIMENTAL SETUP HERE

% EXAMPLE FIGURE: REPLACE AND ADD YOUR OWN FIGURES / CAPTIONS
\begin{figure}[t]
    \centering
    \begin{subfigure}{0.9\textwidth}
        \includegraphics[width=\textwidth]{generated_images.png}
        \caption{PLEASE FILL IN SUBCAPTION HERE}
        \label{fig:diffusion-samples}
    \end{subfigure}
    \caption{PLEASE FILL IN CAPTION HERE}
    \label{fig:first_figure}
\end{figure}

\section{Results}
\label{sec:results}
RESULTS HERE

\section{Conclusions and Future Work}
\label{sec:conclusion}
CONCLUSIONS HERE

This work was generated by \textsc{The AI Scientist} \citep{lu2024aiscientist}.

\bibliographystyle{iclr2024_conference}
\bibliography{references}

\end{document}
