#!/usr/bin/python3
# -*- coding: utf-8; tab-width: 4; indent-tabs-mode: t -*-

import os
import re
import sys
import time
import mrget
import socket
import shutil
import pathlib
import tarfile
import psycopg
import subprocess
import http.client
import urllib.error
import urllib.request
import robust_layer.simple_git

import mirrors.plugin


class Main:
    """Mirror-plugin main logic: keep a local MusicBrainz database mirror fresh.

    A full update downloads the acoustid/mbdata schema repository and the
    latest official dump snapshot, imports everything into a temporary
    PostgreSQL database, then renames it over the live database so readers
    never see a half-imported state.  Incremental updates (live data feed)
    are not implemented yet; when they fail, run() falls back to a full
    update.
    """

    def __init__(self, sock):
        # sock: mirrors.plugin.ApiClient channel back to the mirror daemon
        self.DATE_FMT = "%Y-%m-%d"

        # canonical dump file names,
        # from https://musicbrainz.org/doc/MusicBrainz_Database/Download
        self.dumpFileList = [
            "mbdump.tar.bz2",
            "mbdump-derived.tar.bz2",
            "mbdump-edit.tar.bz2",
            "mbdump-editor.tar.bz2",
            "mbdump-cdstubs.tar.bz2",
            "mbdump-stats.tar.bz2",
            "mbdump-cover-art-archive.tar.bz2",
            "mbdump-event-art-archive.tar.bz2",
            "mbdump-documentation.tar.bz2",
            "mbdump-wikidocs.tar.bz2",
        ]

        self.bInit = (mirrors.plugin.params["run-mode"] == "init")
        # psycopg takes the unix-socket *directory* as host; the port number
        # is the last dot-separated component of the socket file name
        # (".s.PGSQL.<port>").
        self.dbSockDir = os.path.dirname(mirrors.plugin.params["storage-postgresql"]["unix-socket-file"])
        self.dbPort = mirrors.plugin.params["storage-postgresql"]["unix-socket-file"].split(".")[-1]
        self.dbFilesDir = mirrors.plugin.params["storage-postgresql"]["files-directory"]

        # optional config:
        # {
        #     "live-data-feed": {
        #         "token": "XXXXXXXXXXXXX",
        #     }
        # }
        self.liveDataFeedToken = None
        if "live-data-feed" in mirrors.plugin.params["config"]:
            if "token" not in mirrors.plugin.params["config"]["live-data-feed"]:
                raise Exception("no \"token\" in \"live-data-feed\"")
            self.liveDataFeedToken = mirrors.plugin.params["config"]["live-data-feed"]["token"]

        # optional config:
        # {
        #     "extra-dump-files": [
        #         "XXXXX",
        #     ]
        # }
        # mbdump.tar.bz2 (the first entry) is always kept; the remaining dump
        # files are downloaded only when listed in "extra-dump-files".
        if "extra-dump-files" in mirrors.plugin.params["config"]:
            self.dumpFileList = self.dumpFileList[:1] + [x for x in self.dumpFileList[1:] if x in mirrors.plugin.params["config"]["extra-dump-files"]]

        self.sock = sock
        self.p = InfoPrinter()
        self.progress = 0

    def run(self):
        """Perform one mirror update pass."""
        if self.bInit or self.liveDataFeedToken is None:
            # full update
            self._fullUpdate()
        else:
            # incremental update, falling back to a full update on any error
            try:
                self._incrementalUpdate()
            except Exception:
                self._fullUpdate()

    def _fullUpdate(self):
        """Download schema repository and dump snapshot, rebuild the database."""
        repoDir = self.__downloadRepository()               # download mbdata repository
        snapshotDir = self.__downloadSnapshot()             # download dump files
        self.__importDatabase(repoDir, snapshotDir)         # import database
        Util.forceClearDir(self.dbFilesDir)                 # delete downloaded files

    def _incrementalUpdate(self):
        # FIXME: live-data-feed replication is not implemented yet; raising
        # here makes run() fall back to a full update.
        raise NotImplementedError("incremental update is not implemented yet")

    def __downloadRepository(self):
        """Clone or update the acoustid/mbdata git repository, return its path."""
        url = mrget.target_urls("mirror://github/acoustid/mbdata")[0]
        localDir = os.path.join(self.dbFilesDir, "mbdata.git")

        self.p.print("Downloading from \"%s\"..." % (url))
        self.p.incIndent()
        try:
            robust_layer.simple_git.pull(localDir, reclone_on_failure=True, url=url)
        finally:
            self.p.decIndent()

        return localDir

    def __downloadSnapshot(self):
        """Download the latest full-export dump files, return the local snapshot directory."""
        url = "http://ftp.musicbrainz.org/pub/musicbrainz/data/fullexport"

        self.p.print("Downloading from \"%s\"..." % (url))
        self.p.incIndent()
        try:
            # the LATEST file contains the date-stamped directory name of the
            # newest snapshot (os.path.join is safe here: URL paths use "/")
            latestContent = self.__getContent(os.path.join(url, "LATEST")).strip("\n")

            # create local date dir
            localDir = os.path.join(self.dbFilesDir, latestContent)
            os.makedirs(localDir, exist_ok=True)

            # download dump files; the .asc signature file is fetched last so
            # its presence marks the dump file itself as fully downloaded
            for fn in self.dumpFileList:
                ascFn = fn + ".asc"
                if not os.path.exists(os.path.join(localDir, ascFn)):
                    self.__downloadFile(os.path.join(url, latestContent, fn), os.path.join(localDir, fn))
                    self.__downloadFile(os.path.join(url, latestContent, ascFn), os.path.join(localDir, ascFn))
        finally:
            self.p.decIndent()

        return localDir

    def __importDatabase(self, repoDir, snapshotDir):
        """Import the snapshot into a scratch database, then rename it to "musicbrainz".

        repoDir: checkout of the mbdata repository (provides the SQL scripts)
        snapshotDir: directory holding the downloaded mbdump-*.tar.bz2 files
        """
        dbName = "musicbrainz"
        tmpDbName = "_musicbrainz"
        superDbName = "postgres"

        self.p.print("Importing data...")
        self.p.incIndent()
        try:
            # create temporary database (identifiers come from the fixed local
            # variables above, so plain %-formatting of SQL is safe here)
            self.p.print("Creating database...")
            with psycopg.connect(host=self.dbSockDir, port=self.dbPort, dbname=superDbName, autocommit=True) as db:
                with db.cursor() as c:
                    c.execute("DROP DATABASE IF EXISTS %s WITH (FORCE);" % (tmpDbName))
                    c.execute("CREATE DATABASE %s;" % (tmpDbName))

            with psycopg.connect(host=self.dbSockDir, port=self.dbPort, dbname=tmpDbName, autocommit=True) as db:
                with db.cursor() as c:
                    # create musicbrainz schemas
                    self.p.print("Creating schemas...")
                    self.p.incIndent()
                    try:
                        # from https://github.com/acoustid/mbdata/blob/main/README.rst
                        schemaList = [
                            "musicbrainz",
                            "cover_art_archive",
                            "wikidocs",
                            "documentation",
                            "statistics",
                        ]
                        for schema in schemaList:
                            c.execute("CREATE SCHEMA %s;" % (schema))
                    finally:
                        self.p.decIndent()

                    # manipulate musicbrainz extensions, stealthly
                    if True:
                        fullfn = os.path.join(repoDir, "mbdata", "sql", "Extensions.sql")
                        c.execute(self.__getSqlFileContent(fullfn))

                    # create musicbrainz collaction, stealthly
                    if True:
                        # from https://github.com/acoustid/mbdata/blob/main/README.rst
                        fullfn = os.path.join(repoDir, "mbdata", "sql", "CreateCollations.sql")
                        c.execute(self.__getSqlFileContent(fullfn))

                    # create musicbrainz types, stealthly
                    if True:
                        # from https://github.com/acoustid/mbdata/blob/main/README.rst
                        fullfn = os.path.join(repoDir, "mbdata", "sql", "CreateTypes.sql")
                        c.execute("SET search_path = musicbrainz, public;")                         # perhaps we should make types be in musicbrainz schema
                        c.execute(self.__getSqlFileContent(fullfn))
                        c.execute("SET search_path TO DEFAULT;")

                    # create tables
                    self.p.print("Creating tables...")
                    self.p.incIndent()
                    try:
                        # from https://github.com/acoustid/mbdata/blob/main/README.rst
                        createTableSqlList = [
                            "CreateTables.sql",
                            "caa/CreateTables.sql",
                            "wikidocs/CreateTables.sql",
                            "documentation/CreateTables.sql",
                            "statistics/CreateTables.sql",
                        ]
                        for fn in createTableSqlList:
                            self.p.print("Executing %s..." % (fn))
                            c.execute("SET search_path = musicbrainz, public;")                     # make tables with no schema be in musicbrainz schema
                            c.execute(self.__getSqlFileContent(os.path.join(repoDir, "mbdata", "sql", fn)))
                        c.execute("SET search_path TO DEFAULT;")
                    finally:
                        self.p.decIndent()

                    # import data: every "mbdump/<table>" member of every dump
                    # archive is streamed into the matching table via COPY
                    # FIXME: set max_wal_size to maximum to raise performance and eliminate some postgresql warnings
                    for filename in self.dumpFileList:
                        self.p.print("Importing \"%s\"..." % (filename))
                        # use "with" so each archive (and each member file
                        # object) is closed instead of leaking per iteration
                        with tarfile.open(os.path.join(snapshotDir, filename), 'r:bz2') as tar:
                            for member in tar:
                                if not member.name.startswith('mbdump/'):
                                    continue
                                table = member.name[len('mbdump/'):]
                                assert '_sanitised' not in table
                                if '.' in table:
                                    schema, table = table.split('.', 1)
                                    assert '"' not in schema
                                else:
                                    schema = 'musicbrainz'
                                assert '"' not in table
                                # schema/table were asserted quote-free above,
                                # so interpolating them into COPY is safe
                                with c.copy('COPY %s.%s FROM STDIN' % (schema, table)) as copy:
                                    with tar.extractfile(member) as f:
                                        while data := f.read(65536):
                                            copy.write(data)

                    # post process
                    self.p.print("Post processing...")
                    self.p.incIndent()
                    try:
                        # from https://github.com/acoustid/mbdata/blob/main/README.rst
                        createPrimaryKeySqlList = [
                            "CreatePrimaryKeys.sql",
                            "caa/CreatePrimaryKeys.sql",
                            "wikidocs/CreatePrimaryKeys.sql",
                            "documentation/CreatePrimaryKeys.sql",
                            "statistics/CreatePrimaryKeys.sql",
                        ]
                        createIndexSqlList = [
                            "CreateIndexes.sql",
                            "caa/CreateIndexes.sql",
                            "statistics/CreateIndexes.sql",
                        ]
                        createOtherSqlList = [
                            "CreateSearchConfiguration.sql",
                            "CreateFunctions.sql",
                            "CreateViews.sql",
                        ]
                        for fn in createPrimaryKeySqlList + createIndexSqlList + createOtherSqlList:
                            self.p.print("Executing %s..." % (fn))
                            c.execute("SET search_path = musicbrainz, public;")                     # make objects with no schema be in musicbrainz schema
                            c.execute(self.__getSqlFileContent(os.path.join(repoDir, "mbdata", "sql", fn)))
                        c.execute("SET search_path TO DEFAULT;")
                    finally:
                        self.p.decIndent()

            # rename to formal database (atomic switch-over for readers)
            with psycopg.connect(host=self.dbSockDir, port=self.dbPort, dbname=superDbName, autocommit=True) as db:
                with db.cursor() as c:
                    c.execute("DROP DATABASE IF EXISTS %s WITH (FORCE);" % (dbName))
                    c.execute("ALTER DATABASE %s RENAME TO %s;" % (tmpDbName, dbName))
        finally:
            self.p.decIndent()

    def __getContent(self, url):
        """Fetch *url* and return its body as text, retrying transient errors forever.

        Raises urllib.error.HTTPError immediately on 404; all other network
        errors are logged and retried after a 10 second pause.
        """
        while True:
            try:
                return urllib.request.urlopen(url).read().decode("iso8859-1")
            except socket.timeout as e:
                self.p.print("urlopen failed and try again: %s" % str(e))
                time.sleep(10.0)
            except http.client.HTTPException as e:
                self.p.print("urlopen failed and try again: %s" % str(e))
                time.sleep(10.0)
            except urllib.error.HTTPError as e:
                if e.code == 404:
                    raise
                self.p.print("urlopen failed and try again: %s" % str(e))
                time.sleep(10.0)
            except urllib.error.URLError as e:
                self.p.print("urlopen failed and try again: %s" % str(e))
                time.sleep(10.0)

    def __downloadFile(self, url, localFile):
        """Download *url* to *localFile* with wget, retrying until it succeeds.

        wget exit status 8 means the server issued an error response (e.g.
        404), which is treated as permanent and re-raised.
        """
        while True:
            try:
                subprocess.check_call(["wget", "--quiet", "--no-check-certificate", "-O", localFile, url])      # always re-dowloand
                break
            except subprocess.CalledProcessError as e:
                if e.returncode == 8:       # server issued an error response, e.g. not found
                    raise
                self.p.print("download failed and try again: %s" % str(e))
                time.sleep(60)

    def __getSqlFileContent(self, fullfn):
        """Read an mbdata SQL script, stripping psql meta-commands and comments.

        psycopg cannot execute psql backslash commands, so "\\set"/"\\unset"
        lines and "--" comment lines are blanked out before returning.
        """
        buf = pathlib.Path(fullfn).read_text()
        buf = re.sub(r"^\\set ON_ERROR_STOP 1$", "", buf, flags=re.M)
        buf = re.sub(r"^\\unset ON_ERROR_STOP$", "", buf, flags=re.M)
        buf = re.sub(r"^--.*$", "", buf, flags=re.M)                        # example: "-- vi: set ts=4 sw=4 et :"
        return buf


class InfoPrinter:
    """Print status lines indented by one tab per nesting level."""

    def __init__(self):
        # current nesting depth; 0 means flush-left output
        self.indent = 0

    def incIndent(self):
        """Descend one nesting level."""
        self.indent += 1

    def decIndent(self):
        """Ascend one nesting level; must be balanced with incIndent()."""
        assert self.indent > 0
        self.indent -= 1

    def print(self, s):
        """Write *s* to stdout, prefixed with the current indentation."""
        print("%s%s" % ("\t" * self.indent, s))


class Util:
    """Filesystem helpers shared by the plugin."""

    @staticmethod
    def forceDelete(path):
        """Delete *path* regardless of its type; a missing path is a no-op.

        Symlinks are removed as links (never followed), directory trees are
        removed recursively, and any other entry (regular file, device node,
        etc.) is unlinked.
        """
        if not os.path.lexists(path):
            return                          # nothing there, nothing to do
        if os.path.isdir(path) and not os.path.islink(path):
            shutil.rmtree(path)             # real directory tree
        else:
            os.remove(path)                 # symlink, regular or special file

    @staticmethod
    def forceClearDir(path):
        """Remove every entry inside directory *path*, keeping *path* itself."""
        for entry in os.listdir(path):
            Util.forceDelete(os.path.join(path, entry))


###############################################################################

if __name__ == "__main__":
    # Open the plugin API channel and run one update pass; report 100%
    # progress on success.
    with mirrors.plugin.ApiClient() as sock:
        try:
            Main(sock).run()
            sock.progress_changed(100)
        except Exception:
            # Forward the failure to the mirror daemon, then re-raise so the
            # process exits non-zero.
            # NOTE(review): "error_occured" is the API's own spelling — confirm before renaming.
            sock.error_occured(sys.exc_info())
            raise
