#!/usr/bin/python3.4
# -*- coding: utf-8 -*-
"""
    作者:jinayang
    网站:yangshengliang.com
    版本:1.0
    时间: 2016-11-22
    功能描述：将搜索词分词,并写入目标文件中

"""

__author__ = "jinayang"

from com.yangshengliang.config.config import Config
import glob
import jieba
import os
import time
import jieba.analyse


class Fenci(object):
    """Segment search phrases with jieba and write the tokens to files.

    Reads every ``*.txt`` file from the configured keyword directory,
    cuts its content with jieba's search-engine mode, and writes the
    unique tokens longer than one character (comma-separated) to a
    dated result file under ``<result_path>/fenci``.
    """

    def __init__(self):
        print("***********************程序初始化中*********************************")
        print("*******                                                     ******")
        print("*******           计算 词语 的 权重 分词 部分                  ******")
        print("*******                                                     ******")
        print("*********************** 程序初始化中 *******************************")
        self.sep = os.sep
        self.keyword_file_path = Config().config.get("keyword_path")  # directory of search phrases to process
        self.result_file_path = Config().config.get("result_path")  # directory receiving the results
        self.stop_word = Config().config.get("stop_word_path")  # directory holding stopword.txt
        self.fenci_dir = os.path.join(self.result_file_path, "fenci")
        # NOTE: the original code computed this path twice; kept as an
        # alias so external readers of `file_path` keep working.
        self.file_path = self.fenci_dir

        # Create the output directory. exist_ok=True makes makedirs
        # idempotent and removes the check-then-create race of the
        # original exists()/makedirs() pair.
        try:
            if not os.path.exists(self.fenci_dir):
                os.makedirs(self.fenci_dir, exist_ok=True)
                print("完成创建用于存放结果的文件夹,完整路径为：%s" % (self.file_path))
        except OSError as e:
            print(e)

        # Create the keyword (input) directory the same way.
        try:
            if not os.path.exists(self.keyword_file_path):
                os.makedirs(self.keyword_file_path, exist_ok=True)
                print("完成创建用于存放关键词的文件夹,完整路径为：%s" % (self.keyword_file_path))
        except OSError as e:
            print(e)

    def getKey(self):
        """Tokenize every ``*.txt`` keyword file into a dated result file.

        For each input file ``<name>.txt`` the output is written to
        ``<fenci_dir>/<YYYY-MM-DD>-<name>-fenci.txt`` as a comma-separated
        list of unique tokens (single-character tokens are skipped).
        """
        # os.path.join instead of string "+" keeps separators consistent
        # with the rest of the class (the original hard-coded "/").
        jieba.analyse.set_stop_words(os.path.join(self.stop_word, "stopword.txt"))
        pattern = os.path.join(self.keyword_file_path, "*.txt")
        files = [os.path.abspath(path) for path in glob.glob(pattern)]
        for f in files:
            file_name = os.path.splitext(os.path.basename(f))[0]
            # strftime defaults to local time; the explicit
            # localtime(time()) argument was redundant.
            today = time.strftime("%Y-%m-%d")
            self.outfile_fenci = os.path.join(
                self.fenci_dir, "%s-%s-fenci.txt" % (today, file_name))
            # "with" guarantees both handles are closed even if jieba
            # raises; the original leaked the input handle entirely.
            # jieba decodes bytes as UTF-8 internally, so reading the
            # file as UTF-8 text is equivalent to the old 'rb' read.
            with open(f, encoding="utf-8") as infile:
                se_psg = jieba.cut_for_search(infile.read())
            # 'w' (not 'w+') — the file is only written, never read back.
            with open(self.outfile_fenci, "w", encoding="utf-8") as outfile:
                # set() deduplicates the token stream.
                for token in set(se_psg):
                    if len(token) > 1:
                        outfile.write(token + ",")

        print("*******           分词完成                  ******")


if __name__ == "__main__":
    # Script entry point: build the tokenizer, then run the segmentation.
    fenci = Fenci()
    fenci.getKey()