# coding=utf-8  # store Chinese characters using UTF-8 encoding
import codecs
import pandas as pd
import Core.MongoDB as db
from bs4 import BeautifulSoup
import string

def ReadCSVFile(fullpathfilename, spliter, expected_fields=29):
    """Parse a multi-line-record log file into a pandas DataFrame.

    Each record begins on a physical line whose first field is a numeric
    id greater than 32767; any following line that does not look like a
    record start is treated as a continuation and appended to the current
    record (article bodies contain embedded newlines).

    Args:
        fullpathfilename: path to the UTF-8 encoded log file.
        spliter: field separator string (e.g. "||||").
        expected_fields: records with any other field count are dropped
            (default 29, the article_publish schema width).

    Returns:
        pandas.DataFrame with one row per well-formed record; the trailing
        newline of each physical line is kept inside the last field, as in
        the original implementation.
    """
    rows = []
    right = 0   # records with the expected field count
    wrong = 0   # malformed records (silently dropped)
    pending = ""  # record currently being accumulated

    def _flush(record):
        # Keep the record only if it splits into exactly expected_fields.
        nonlocal right, wrong
        fields = record.split(spliter)
        if len(fields) == expected_fields:
            right += 1
            rows.append(fields)
        else:
            wrong += 1

    # BUGFIX: 'rU' mode was removed in Python 3.11; plain text mode already
    # performs universal-newline translation. Also use `with` so the file
    # handle is closed even on error (the original leaked it).
    with open(fullpathfilename, 'r', encoding="utf-8") as file:
        for s in file:
            content = s.split(spliter)
            # A line whose first field is a large integer starts a new record.
            if content[0].isdigit() and int(content[0]) > 32767:
                if pending:
                    _flush(pending)
                pending = s
            else:
                # Continuation of the current record (embedded newline).
                pending += s

    # BUGFIX: flush the final record — the original loop dropped the last
    # record of every file because nothing processed it after EOF.
    if pending:
        _flush(pending)

    return pd.DataFrame(rows)


def LoadArticles(fullpathfilename):
    """Load article records from `fullpathfilename`, compute the visible
    text length of each article body (HTML stripped, punctuation and
    spaces removed), and upsert one {"Key", "length"} document per article
    into the Recommender.Article MongoDB collection.

    Relies on the module-level `database` MongoDB connection.
    """
    table = ReadCSVFile(fullpathfilename, spliter="||||")
    table.columns = ['id', 'data_source', 'busi_type', 'craw_source_dict_id', 'copr_dict_id', 'old_id', 'title',
                     'show_title', 'content', 'focus_img_url', 'tag', 'author', 'copy_right', 'service_id',
                     'push_channel', 'push_summary', 'summary', 'article_url', 'timer', 'show_status',
                     'publish_status', 'publish_user', 'publish_time', 'create_user', 'create_time',
                     'modified_user', 'modified_time', 'is_deleted', 'sync_id']

    # One-pass translation table replacing the per-character replace() loop.
    punct_table = str.maketrans('', '', string.punctuation)

    count = 0
    for i in range(len(table)):
        count += 1

        article_id = table.loc[i, 'id']  # renamed: don't shadow builtin id()
        content = table.loc[i, 'content']

        # Explicit parser avoids bs4's GuessedAtParserWarning and makes the
        # extracted text deterministic across environments.
        soup = BeautifulSoup(content, "html.parser")
        article = soup.get_text()

        # Strip ASCII punctuation, surrounding whitespace, and internal
        # spaces so `length` counts only the visible characters.
        article = article.translate(punct_table).strip().replace(' ', '')

        articleDoc = {
            "length": len(article),
            "Key": str(article_id),
        }
        print(count, article_id)

        # --- Insert Database ---
        database.upsert("Recommender", "Article", {"Key": articleDoc["Key"]}, articleDoc)


# Shared MongoDB connection read by LoadArticles for its upserts.
# Kept at module level so importers calling LoadArticles still find it.
database = db.MongoDB("192.168.0.100", "27017")

if __name__ == "__main__":
    # Guarded so that importing this module does not immediately run the
    # full load (the original executed it as a top-level side effect).
    # Index creation (Key, publish_time) is assumed to have been done once
    # out-of-band via database.creatIndex — TODO confirm.
    LoadArticles(fullpathfilename='D:/Data/Article/article_publish2_rec_article.log')

