################################################################################
# Copyright (c) 2021 ContinualAI.                                              #
# Copyrights licensed under the MIT License.                                   #
# See the accompanying LICENSE file for terms.                                 #
#                                                                              #
# Date: 1-05-2020                                                              #
# Author(s): Vincenzo Lomonaco                                                 #
# E-mail: contact@continualai.org                                              #
# Website: avalanche.continualai.org                                           #
################################################################################

# Configuration file for the Sphinx documentation builder.
#
# This file does only contain a selection of the most common options. For a
# full list see the documentation:
# http://www.sphinx-doc.org/en/master/config

# -- Path setup --------------------------------------------------------------

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys

import pkgutil
from importlib import import_module
from typing import Any

from jinja2.filters import FILTERS


# Make the repository root importable so autodoc can resolve ``avalanche``.
sys.path.insert(0, os.path.abspath(".."))


# -- Project information -----------------------------------------------------

project = "Avalanche"
# NOTE: ``copyright`` intentionally shadows the builtin — Sphinx reads this
# exact variable name from conf.py.
copyright = "2022, ContinualAI"
author = "ContinualAI"

# The short X.Y version (left empty: only ``release`` is displayed).
version = ""
# The full version, including alpha/beta/rc tags
release = "0.1"


# -- General configuration ---------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    "sphinx.ext.doctest",  # run code samples embedded in the docs
    "sphinx.ext.todo",  # render ``.. todo::`` directives
    "sphinx.ext.mathjax",  # LaTeX math rendered client-side via MathJax
    "sphinx.ext.viewcode",  # link documented objects to highlighted source
    "sphinx.ext.autodoc",  # pull API documentation from docstrings
    "sphinx.ext.autosummary",  # generate API summary tables/stub pages
    "sphinx.ext.githubpages",  # add .nojekyll for GitHub Pages hosting
    "sphinx.ext.coverage",  # doc-coverage build (see bottom of this file)
    "sphinx_rtd_theme",  # Read the Docs HTML theme
    "sphinx_copybutton",  # "copy" button on code blocks
]

# Generate autosummary stub .rst pages automatically at build time.
autosummary_generate = True

# In coverage reports, list the undocumented objects themselves.
coverage_show_missing_items = True

# Add any paths that contain templates here, relative to this directory.
templates_path = ["./_templates"]

# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = ".rst"

# The master toctree document (the documentation root page).
master_doc = "index"

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = "en"

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
# ``_templates`` is excluded so template files are not built as pages.
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store", "_templates"]

# The name of the Pygments (syntax highlighting) style to use.
# ``None`` lets the HTML theme pick its own default style.
# (Previously written ``pygments_style: None = None`` — an annotation that
# declares the variable as type ``None``, which is meaningless; a plain
# assignment is the conventional form.)
pygments_style = None

# Insert only the class docstring (not ``__init__``'s) into the class
# documentation; use "both" to concatenate class and __init__ docstrings.
autoclass_content = "class"


# -- Options for HTML output -------------------------------------------------

# The theme to use for HTML and HTML Help pages.  See the documentation for
# a list of builtin themes.
#
html_theme = "sphinx_rtd_theme"
html_logo = "./_static/img/avalanche_logo.png"

# Theme options are theme-specific and customize the look and feel of a theme
# further.  For a list of options available for each theme, see the
# documentation.
#
html_theme_options = {
    "collapse_navigation": True,  # set to True to speed up build
    "navigation_depth": 2,  # lower depth to speed up build
    "logo_only": True,  # show only the logo (no project name) in the sidebar
    "display_version": True,  # show the release number under the logo
}

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ["_static"]

# custom css (path is relative to html_static_path entries)
html_style = "css/mystyle.css"

# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# The default sidebars (for documents that don't match any pattern) are
# defined by theme itself.  Builtin themes are using these templates by
# default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
# 'searchbox.html']``.
#
# html_sidebars = {}


# -- Options for HTMLHelp output ---------------------------------------------

# Output file base name for HTML help builder (the .chm project name).
htmlhelp_basename = "Avalanchedoc"


# -- Options for LaTeX output ------------------------------------------------

# Empty: all LaTeX knobs are left at Sphinx defaults; the commented keys
# below document the most common overrides.
latex_elements: dict[Any, Any] = {
    # The paper size ('letterpaper' or 'a4paper').
    #
    # 'papersize': 'letterpaper',
    # The font size ('10pt', '11pt' or '12pt').
    #
    # 'pointsize': '10pt',
    # Additional stuff for the LaTeX preamble.
    #
    # 'preamble': '',
    # Latex figure (float) alignment
    #
    # 'figure_align': 'htbp',
}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
    (
        master_doc,
        "Avalanche.tex",
        "Avalanche Documentation",
        "ContinualAI Research",
        "manual",
    ),
]


# -- Options for manual page output ------------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
# Section 1 = user commands.
man_pages = [(master_doc, "avalanche", "Avalanche Documentation", [author], 1)]


# -- Options for Texinfo output ----------------------------------------------

# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
# TODO(review): "One line description of project." below is the sphinx-quickstart
# placeholder — replace it with a real one-line project description.
texinfo_documents = [
    (
        master_doc,
        "Avalanche",
        "Avalanche Documentation",
        author,
        "Avalanche",
        "One line description of project.",
        "Miscellaneous",
    ),
]


# -- Options for Epub output -------------------------------------------------

# Bibliographic Dublin Core info.
epub_title = project

# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#
# epub_identifier = ''

# A unique identification for the text.
#
# epub_uid = ''

# A list of files that should not be packed into the epub file.
# search.html is JavaScript-driven and useless inside an epub reader.
epub_exclude_files = ["search.html"]


# -- Extension configuration -------------------------------------------------

# -- Options for todo extension ----------------------------------------------

# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True


# -- Doc Coverage -------------------------------------------------------------
from sphinx.ext.coverage import CoverageBuilder
import avalanche
import inspect
import pkgutil


# These classes are currently not documented in the api-doc. Some of these may
# be private, so it's ok not to add them. If you feel they should be documented
# remove them from this list and add them in some .rst file.
# You can check coverage with the command:
#   sphinx-build -b coverage . _build
# Defined directly as a set literal (membership is the only use; the previous
# list-then-``set(...)`` two-step built a throwaway list).
undocumented_classes_to_ignore = {
    # benchmarks
    "IDataset",
    "TensorMNIST",
    "SpeechCommandsData",
    "ClassAccuracyPluginMetric",
    "MeanScoresTrainPluginMetric",
    "MeanScoresEvalPluginMetric",
    "AMCAPluginMetric",
    "DictLVIS",
    "LvisEvaluator",
    "CocoEvaluator",
    "DetectionEvaluator",
    "ClassificationDataset",
    "YTransformDef",
    "StreamDef",
    "Flatten",
    "XComposedTransformDef",
    "SubSequence",
    "SimpleDownloadableDataset",
    "LazyDatasetSequence",
    "INATURALIST_DATA",
    "FilelistDataset",
    "ClassificationScenarioStream",
    "MaskedAttributeError",
    "MultiParamTransform",
    "PixelsPermutation",
    "SubsetWithTargets",
    "IClassificationDataset",
    "StreamUserDef",
    "ClassificationSubSequence",
    "ConstantSequence",
    "SequenceDataset",
    "DownloadableDataset",
    "PathsDataset",
    "Compose",
    "VideoSubSequence",
    "PennFudanDataset",
    "IDatasetWithTargets",
    "ISupportedClassificationDataset",
    "LazyStreamDefinition",
    "ITensorDataset",
    "XTransformDef",
    "LazyClassesInExps",
    "_LazyStreamClassesInExps",
    # evaluation
    "MACPluginMetric",
    "CPUPluginMetric",
    "TimePluginMetric",
    "RAMPluginMetric",
    "GPUPluginMetric",
    "DiskPluginMetric",
    "TopkAccuracyPluginMetric",
    "AccuracyPluginMetric",
    "MeanScoresPluginMetricABC",
    "GenericStreamForgetting",
    "GenericStreamForwardTransfer",
    "GenericExperienceForwardTransfer",
    "GenericExperienceForgetting",
    "LossPluginMetric",
    "TensorEncoder",
    "TensorImage",
    "AlternativeValues",
    "LabelsRepartitionPlugin",
    # Training
    "AlreadyTrainedError",
    "VAETraining",
    "Clock",
    "PeriodicEval",
    # Utils
    "LayerAndParameter",
    "ComposeMaxParamsWarning",
    # Models
    "IdentityShortcut",
    "ResidualBlock",
    "Generator",
    # Other
    "L2Normalization",
    "PPPloss",
    "COCO",
    "ClassificationExperience",
    "LVISAnnotationEntry",
    "ExperienceMode",
    "LVISImgEntry",
    "VAEMLPDecoder",
    "MultiTaskDecorator",
    "CLEARMetric",
    "LVISDetectionTargets",
    "LVIS",
    "GenericCLScenario",
    "BatchRenorm2D",
    "OpenLORISDataset",
    "VAEMLPEncoder",
    "LvisDataset",
    "SupervisedDetectionDataset",
    "GenericDetectionExperience",
    "SupervisedStrategyProtocol",
    "ClassBalancedBufferWithLogits",
    "SequenceCLStream",
    "LVISImgTargets",
    "CLStreamWrapper",
    "BiasLayer",
    "DetectionCLScenario",
    "GeneratorMemo",
    "LossPerTaskPluginMetric",
    "MultiDatasetDataLoader",
    "AbstractClassTimelineExperience",
    "DefaultTransformGroups",
    "BroadcastSeedContext",
    "LazyIndices",
    "SupervisedProblem",
    "DatasetStream",
    "MetaUpdate",
    "OnlineClassificationExperience",
    "FactoryBasedStream",
    "DetectionDataset",
    "ExpertAutoencoder",
    "ExpertModel",
    "NIStream",
    "SGDUpdate",
    "ClassificationScenario",
    "TransformGroups",
    "EmptyTransformGroups",
    "BatchObservation",
    "SCRModel",
    "MERBuffer",
    "BasicBlock",
    "SequenceStreamWrapper",
    "SizedCLStream",
    "DetectionExperience",
    "SupervisedClassificationDataset",
    "DatasetExperience",
    "SizedCLStreamWrapper",
    "Prompt",
    "ResNet",
    "SliceSequence",
    "DetectionStream",
    "ExperienceWrapper",
    "ClassesTimelineCLScenario",
    "DetectionScenario",
    "AETraining",
    "BaseStrategyProtocol",
    "MappedUnpickler",
    "FlatData",
    "ParamData",
    "MultiDatasetSampler",
    "DatasetScenario",
    "NCStream",
    "GenericClassificationExperience",
    "MultiParamCompose",
    "LazyRange",
    "SGDStrategyProtocol",
    "SupervisedMetaLearningTemplate",
    "RollingSeedContext",
    "TaskLabels",
    "SupervisedMetaLearningPlugin",
    "TupleTransform",
    "MetaLearningStrategyProtocol",
    "MultiParamTransformCallable",
    "TqdmUpTo",
}


def coverage_post_process(app, exception):
    """Report Avalanche classes that are missing from the API docs.

    Connected to the Sphinx ``build-finished`` event (see :func:`setup`).
    It runs only for the coverage builder (``sphinx-build -b coverage``):
    it collects the classes Sphinx documented, walks the ``avalanche``
    package to collect all public classes, and prints the difference
    minus the known-undocumented names in
    ``undocumented_classes_to_ignore``.

    :param app: the Sphinx application object.
    :param exception: the exception that aborted the build, or ``None``
        if the build succeeded. Nothing is done on a failed build.
    """
    if exception is not None:
        return

    # Only run this check for the coverage build.
    if not isinstance(app.builder, CoverageBuilder):
        return

    # Classes already documented by Sphinx, reduced to bare class names.
    # STRONG ASSUMPTION HERE: unique names for classes in different
    # namespaces. Otherwise, we need to detect the case when mylib.Type is
    # documented but mylib.a.Type (submodule) is not, and I don't want to do
    # this. Also, uniqueness holds in Avalanche (for the moment).
    doc_classes = {
        s.split(".")[-1] for s in app.env.domaindata["py"]["objects"]
    }

    def is_not_internal(qualified_name):
        """Internal modules/classes have a component starting with '_'."""
        # startswith is safe for empty components (indexing would raise).
        return not any(
            part.startswith("_") for part in qualified_name.split(".")
        )

    lib_classes = set()
    # Pre-bind the names used in the error messages below so they are
    # defined even if iteration fails before the first assignment.
    modname = name = "<none>"
    try:
        for _, modname, _ispkg in pkgutil.walk_packages(
            path=avalanche.__path__, prefix=avalanche.__name__ + "."
        ):
            try:
                for name, obj in inspect.getmembers(sys.modules[modname]):
                    if (
                        inspect.isclass(obj)
                        and obj.__module__.startswith("avalanche")
                        and is_not_internal(obj.__module__ + "." + name)
                    ):
                        lib_classes.add(name)
            except Exception as e:
                # TODO: I got some errors on lvis.
                # Also seems to crash on module attributes that are
                # instance variables instead of classes/functions/modules.
                # No idea why, but we can ignore them for the moment.
                print(
                    f"Error on module {modname}, member {name}, exception "
                    f"type {type(e)}"
                )

    except Exception as e:
        print("ERROR!!!!!!!!!!!!!!!!!!!!!!!!!!!!!")
        print(f"on module {modname}, member {name}, exception type {type(e)}")
        print("ERROR MESSAGE: ", e)

    print(lib_classes)

    missing_classes = lib_classes - doc_classes
    missing_classes = missing_classes - undocumented_classes_to_ignore
    print("MISSING CLASSES: ")
    for el in missing_classes:
        print(f"\t- {el}")


# Called automatically by Sphinx, making this `conf.py` an "extension".
def setup(app):
    """Register the post-build coverage check.

    Sphinx invokes this hook when loading ``conf.py`` as an extension;
    we subscribe ``coverage_post_process`` to the ``build-finished``
    event so missing-class reporting runs after a coverage build.
    """
    app.connect("build-finished", coverage_post_process)


def get_attributes(item, obj, modulename):
    """Jinja2 filter that validates attribute entries for autosummary.

    Looks up ``obj`` inside ``modulename`` and, when that object really
    has an attribute named ``item``, returns the autosummary reference
    ``~<obj>.<item>``; otherwise returns an empty string. This avoids
    import errors when documenting inherited attributes with autosummary.
    """
    owner = getattr(import_module(modulename), obj)
    return f"~{obj}.{item}" if hasattr(owner, item) else ""


# Expose ``get_attributes`` as a Jinja2 template filter so the autosummary
# templates in _templates can call it.
FILTERS["get_attributes"] = get_attributes
