#coding=utf-8
import codecs
import collections
import json
import os, re
import os.path
import sys
import traceback
import glob
import htmlmin
from bs4 import BeautifulSoup
from readmdict import MDX, MDD
import sqlite3
import hashlib

def create_db(db_path='english_dict.db'):
    """Create the SQLite database with the DICT(NAME, MEAN) table.

    Args:
        db_path: Path of the SQLite database file. Defaults to the
            original hard-coded name so existing callers are unaffected.

    Uses CREATE TABLE IF NOT EXISTS so repeated runs are idempotent
    (the original raised OperationalError on a second run), and a
    try/finally so the connection is closed even if table creation fails.
    """
    conn = sqlite3.connect(db_path)
    print("Opened database successfully")
    try:
        conn.execute('''CREATE TABLE IF NOT EXISTS DICT
            (NAME           TEXT    NOT NULL,
            MEAN           TEXT);''')
        print("Table created successfully")
        conn.commit()
    finally:
        conn.close()


def insert_word(conn, word, mean):
    """Insert one (word, meaning) row into the DICT table.

    Args:
        conn: An open sqlite3 connection.
        word: Headword to store in NAME.
        mean: Definition text to store in MEAN.

    Uses a parameterized query: the original interpolated `word`/`mean`
    straight into the SQL string, which was an injection vector and
    crashed on any value containing a single quote (common in dictionary
    entries, e.g. "don't"). Errors are logged rather than propagated,
    matching the original best-effort behavior.
    """
    try:
        conn.execute("INSERT INTO DICT (NAME,MEAN) VALUES (?, ?)",
                     (word, mean))
        conn.commit()
    except Exception:
        print(traceback.format_exc())

#      2   Encoding : BIG5
#     48   Encoding : GBK
#      5   Encoding : ISO8859-1
#     63   Encoding : UTF-16
#    342   Encoding : UTF-8
#    460   Format : Html

# Maps a dictionary file name -> {"encoding": ..., "source": ...};
# populated by parse_mdx() from the metadata file e:\title.txt.
dict_source_map = {}

def parse_extra(explain):
    """Extract part-of-speech and inflected-form text from an entry's HTML.

    Returns a string of the form "pos1,pos2|orth1,orth2|" — the
    comma-joined text of the matched spans, each group terminated by '|'.
    """
    soup = BeautifulSoup(explain, "html.parser")

    pos_spans = soup.select('span[class="gramGrp"] > span[class="pos"]')
    orth_spans = soup.select('span[class*="inflected_forms"] > span[class="orth"]')

    groups = (
        ",".join(span.get_text() for span in pos_spans),
        ",".join(span.get_text() for span in orth_spans),
    )
    return "".join(group + "|" for group in groups)

# Maps a content hash (sha256(key)+sha256(value)+md5(value) hexdigests)
# -> the first word seen with that content; shared across parse_mdx()
# calls so duplicate entries between dictionaries are skipped.
means_map = {}

def parse_mdx(mdx_file):
    """Extract (word, definition) pairs from one MDX dictionary file.

    Reads per-dictionary metadata from e:\\title.txt into the module-level
    dict_source_map, then iterates every entry of *mdx_file* and writes
    lines of the form ``@@==@@|word|source|definition`` to
    ``g:\\<basename>.html.txt``. Entries whose headword is not plain
    ASCII (letters, apostrophe, space, underscore, hyphen) are skipped,
    and entries whose exact key/value content was already seen in an
    earlier dictionary (tracked in the module-level means_map) are
    deduplicated.

    Changes from the original:
    - all files opened with ``with`` (the old outer ``except: return``
      leaked the output handle on failure);
    - raw strings for regexes and the word regex compiled once instead
      of per entry;
    - removed dead code guarded by the hard-coded ``only_test=True``
      flag, the unused ``base, ext`` split, and the unused
      ``global conn`` declaration;
    - the outer exception is logged instead of silently swallowed.
    """
    # Load the dictionary metadata table: lines of "name||encoding||source".
    with open("e:\\title.txt", 'r', encoding='utf8') as tf:
        for line in tf:
            items = re.split(r"\|\|", line)
            if len(items) >= 3:
                dict_source_map[items[0]] = {
                    "encoding": items[1].strip(),
                    "source": items[2].strip(),
                }

    # NOTE(review): the original only warned and carried on here;
    # behavior preserved (MDX() below will fail and be logged).
    if not os.path.exists(mdx_file):
        print("Please specify a valid MDX/MDD file")

    # Only keep plain-ASCII headwords; compiled once, used per entry.
    word_pattern = re.compile(r"^[a-zA-Z' _-]+$")

    try:
        mdx = MDX(mdx_file, "", False, None)
        if type(mdx_file) is str:
            bfname = mdx_file.encode('utf-8')
        else:
            bfname = mdx_file
        print('======== %s ========' % bfname)
        print('  Number of Entries : %d' % len(mdx))

        # Human-readable source label for this dictionary, if known.
        meta = dict_source_map.get(bfname.decode("utf-8", "ignore").strip())
        dict_source_name = meta["source"] if meta else ""

        output_fname = ''.join(["g:\\" + os.path.basename(mdx_file),
                                os.path.extsep, 'html.txt'])
        with open(output_fname, 'w', encoding='utf8') as out:
            for key, value in mdx.items():
                try:
                    word = key.decode("utf-8", "ignore").strip() \
                              .replace("\r\n", "").replace("\n", "")
                    explain = value.decode("utf-8", "ignore").strip() \
                                   .replace("\r\n", "").replace("\n", "")
                    if not word_pattern.search(word):
                        continue

                    # Content fingerprint of the raw entry; identical
                    # entries seen in earlier dictionaries are skipped.
                    h = (hashlib.sha256(key).hexdigest()
                         + hashlib.sha256(value).hexdigest()
                         + hashlib.md5(value).hexdigest())
                    oldword = means_map.get(h)
                    if oldword:
                        print("***************" + oldword + " " + word)
                        continue
                    means_map[h] = word

                    out.write("@@==@@|{}|{}|{}\n".format(
                        word, dict_source_name, explain))
                except Exception:
                    # One bad entry must not abort the whole dictionary.
                    print(traceback.format_exc())
    except Exception:
        # Original silently returned here; log so failures are visible.
        print(traceback.format_exc())
        return

#====================================

#create_db()

#conn = sqlite3.connect('english_dict.db')

# Convert every MDX dictionary found under G:\dict.
files = glob.glob("G:\\dict\\*.mdx")
for mdx_path in files:
    parse_mdx(mdx_path)

#conn.close()


