#!/usr/bin/python
# -*- coding: utf-8 -*-

import jieba
from collections import Counter
import os, re, json

# Root directory holding the government work-report ("ROTW") text files.
filePath = "/home/xiaoyu/Documents/DataVisualizationByD3/D3/publicdata/data/Goverment/ROTW"

# Mode flags for getFreq(): analyse a single file vs. every file under a directory.
SINGLE = 0x0
ALL = 0x1


def getWordFreq(text, need=20):
    """Segment *text* with jieba and return its most common words.

    Words shorter than 2 characters are discarded, which filters out
    single characters and most punctuation left after segmentation.

    Args:
        text: The raw text to segment (renamed from ``str``, which
              shadowed the builtin; all in-file callers pass positionally).
        need: How many of the most common words to keep.

    Returns:
        dict mapping word -> occurrence count for the top *need* words.
    """
    # cut_all=False selects jieba's "accurate" mode (no overlapping segments).
    words = [w for w in jieba.cut(text, cut_all=False) if len(w) >= 2]
    return dict(Counter(words).most_common(need))


def getFreq(path, type=SINGLE, need=20):
    """Compute top word frequencies for one report file or a whole directory.

    Args:
        path: Filesystem path.  With type=SINGLE it names one report file;
              with type=ALL it names a directory whose files are all read
              and combined before counting.
        type: SINGLE or ALL mode flag.  (Name shadows the builtin but is
              kept: the caller at module bottom passes it as a keyword.)
        need: Number of most-common words to return.

    Returns:
        dict mapping word -> count, or "" when *type* matches neither mode.
    """
    d = ""
    if type == SINGLE:
        # Bug fix: original called path.read() on the path itself; open the
        # file (and close it via the context manager) instead.
        with open(path) as f:
            report = f.read().replace(" ", "").replace("\n", "")
        d = getWordFreq(report, need)
    elif type == ALL:
        parts = []
        for root, dirs, files in os.walk(path):
            for name in files:
                # Join against *root*, not *path*: os.walk descends into
                # subdirectories, where path + "/" + name would be wrong.
                with open(os.path.join(root, name)) as f:
                    parts.append(f.read().replace(" ", "").replace("\n", ""))
        # "".join avoids the quadratic cost of repeated string +=.
        d = getWordFreq("".join(parts), need)
    return d


def writeToJson(data, filepath):
    """Serialize *data* to *filepath* as UTF-8 JSON, overwriting the file.

    Bug fix: the original opened the file in append mode ("a"), so running
    the script twice concatenated two JSON documents into one invalid file.
    Write mode ("w") replaces the previous contents.

    Args:
        data: Any json-serializable object (renamed from ``dict``, which
              shadowed the builtin; the in-file caller passes positionally).
        filepath: Destination path for the JSON file.
    """
    # ensure_ascii=False keeps the Chinese words readable instead of \uXXXX.
    with open(filepath, "w", encoding="utf-8") as jsonfile:
        jsonfile.write(json.dumps(data, ensure_ascii=False))


if __name__ == '__main__':
    # Count the top 50 words across every report in the corpus directory,
    # then dump the frequency table as JSON.
    freq_table = getFreq(filePath, type=ALL, need=50)
    writeToJson(freq_table, "/home/xiaoyu/123.json")
