import base64
import pandas as pd
import json
from sklearn.feature_extraction.text import CountVectorizer
import pickle
from Crypto.Cipher import ARC4 as rc4cipher
import datetime
import pymysql
import re
import warnings
import os
import redis
warnings.filterwarnings('ignore')

# Recursively collect smslog attachment file paths.
def listdir(path, list_name):  # appends into the caller-supplied list
    """Walk *path* recursively and append to *list_name* (mutated in place)
    every regular file whose full path contains ".net/smslog".

    Parameters:
        path:      directory to scan.
        list_name: list collecting the matching file paths.
    """
    # NOTE(review): removed debug print(path)/print(list_name) — they dumped
    # the whole accumulator at every recursion level.
    for entry in os.listdir(path):
        file_path = os.path.join(path, entry)
        if os.path.isdir(file_path):
            listdir(file_path, list_name)
        elif ".net/smslog" in file_path:
            list_name.append(file_path)
# Root directory holding the encrypted SMS log attachment files.
path = "/data2/wwwroot/dev-saas-thailand/public/attachment_data2/moneygo.th.loancloudth.net/smslog/"
# Accumulator filled by listdir() at import time.
# NOTE(review): this raises FileNotFoundError if *path* does not exist — the
# original behaved the same way; confirm that is intended.
list_name = []
listdir(path, list_name)
# RC4 decryption key for the smslog payloads.
# NOTE(review): hard-coded secret — consider loading from config/env.  The
# original also printed it to stdout; that print was removed because it
# leaks the key into logs.
key = '48519f7557e1b0f05c65a0455dc64fb1'
# RC4 decryption helper (pure-Python equivalent of the ARC4 cipher).
def rct_algorithm(data, key):
    """Base64-decode *data*, RC4-decrypt it with *key*, return UTF-8 text.

    Parameters:
        data: base64-encoded RC4 ciphertext (str or bytes).
        key:  cipher key as a str (encoded to UTF-8 bytes).
    """
    ciphertext = base64.b64decode(data)
    key_bytes = bytes(key, encoding='utf-8')
    # Key-scheduling algorithm (KSA): permute the 256-byte state with the key.
    sbox = list(range(256))
    j = 0
    for i in range(256):
        j = (j + sbox[i] + key_bytes[i % len(key_bytes)]) % 256
        sbox[i], sbox[j] = sbox[j], sbox[i]
    # Pseudo-random generation (PRGA): XOR the keystream over the ciphertext.
    plain = bytearray()
    i = j = 0
    for byte in ciphertext:
        i = (i + 1) % 256
        j = (j + sbox[i]) % 256
        sbox[i], sbox[j] = sbox[j], sbox[i]
        plain.append(byte ^ sbox[(sbox[i] + sbox[j]) % 256])
    return plain.decode('utf8')

#%% SMS classification model
def data_process(a):
    """Classify every SMS dict in *a* with the pickled model and enrich it
    in place, then return *a*.

    Each element must carry an 'sms_body' key.  Two keys are added:
      - 'event':    int class label predicted by the model
      - 'sms_send': sender extracted from <...> in the body (or [...] as a
                    fallback), with '?' characters removed.
    """
    sms_body = pd.DataFrame([item['sms_body'] for item in a],
                            columns=['sms_body'])
    feature_path = '/home/message-risk/tai_feature_dict.pkl'
    tfidftransformer_path = '/home/message-risk/tai_tfidftransformer_dict.pkl'
    model_path = '/home/message-risk/tai_message_model.pkl'
    # Use context managers so the pickle file handles are always closed
    # (the original leaked three open files per call).
    # NOTE(review): pickle.load on these paths is only safe if the files are
    # trusted — confirm they cannot be written by untrusted users.
    with open(feature_path, "rb") as f:
        loaded_vec = CountVectorizer(decode_error="replace",
                                     vocabulary=pickle.load(f))
    with open(tfidftransformer_path, "rb") as f:
        tfidftransformer = pickle.load(f)
    with open(model_path, "rb") as f:
        clf = pickle.load(f)
    test_tfidf = tfidftransformer.transform(
        loaded_vec.transform(sms_body['sms_body'].values.astype('U')))
    # Hoisted: the original called .tolist() on every loop iteration.
    predictions = clf.predict(test_tfidf).tolist()
    p1 = re.compile(r'[<](.*?)[>]', re.S)  # sender wrapped in angle brackets
    p2 = re.compile(r'[[](.*?)[]]', re.S)  # fallback: square brackets
    for pred, sms in zip(predictions, a):
        sms['event'] = int(pred)
        sms['sms_send'] = ''.join(p1.findall(sms['sms_body']))
        if sms['sms_send'] == '':
            sms['sms_send'] = ''.join(p2.findall(sms['sms_body']))
        sms['sms_send'] = sms['sms_send'].replace('?', '')

    return a

# Read one encrypted smslog file, classify its messages, write to MySQL.
def reviewdata_insert(db, path):
    """Decrypt the smslog file at *path*, run the SMS classifier, and insert
    the usable rows into lc_sms_model_analysis through *db*.

    Rows whose predicted event is 24, or whose extracted sender is empty,
    are skipped.  Commits on success.

    Parameters:
        db:   open pymysql connection.
        path: path of the attachment file; its first line is the whole
              base64/RC4 payload (a JSON list once decrypted).
    """
    # Only the first line is meaningful; the original read the whole file
    # into a list (shadowing the builtin) just to take element 0.
    with open(path, 'r') as f:
        data = f.readline()
    sms = json.loads(rct_algorithm(data, key))
    create_time = datetime.datetime.now()
    sms_result = data_process(sms)
    result = []
    for item in sms_result:
        if item['event'] == 24:      # discarded class
            continue
        if item['sms_send'] == '':   # no sender could be extracted
            continue
        result.append([item['name'], item['phone'], item['sms_send'],
                       item['sms_body'], item['sms_time'], item['sms_type'],
                       item['event'], create_time])
    insert_sql = "insert into lc_sms_model_analysis(name, phone, sms_send,sms_body, sms_time, sms_type, event,create_time) values (%s, %s, %s, %s,%s, %s,%s,%s)"
    # Parameterized executemany; cursor is closed even if the insert raises.
    with db.cursor() as cursor:
        cursor.executemany(insert_sql, result)
    db.commit()
if __name__ == "__main__":  # script entry point
    # NOTE(review): DB/Redis credentials are hard-coded — move to config/env.
    db = pymysql.connect(
        host='192.168.0.154',
        port=3306,
        user='root',
        passwd='EKMqeTFcOaef1UG4',
        db='thailand_saas_center'
    )
    r = redis.Redis(host='192.168.0.146', password='CuYI0lD5JwJmOznA',
                    port=6379, db=8, decode_responses=True)
    # 'read_list' in Redis is a comma-separated list of already-processed
    # file paths, used to make re-runs idempotent.
    read_redis = r.get('read_list')
    read_list = [] if read_redis is None else read_redis.split(",")
    print(list_name)
    try:
        for file_path in list_name:
            if file_path in read_list:
                continue  # already ingested on a previous run
            # (Removed: a dead r.get() and a cursor that was opened/closed
            # around this call but never used — reviewdata_insert manages
            # its own cursor.)
            reviewdata_insert(db, file_path)
            # Persist progress after every file so a crash does not redo work.
            read_list.append(file_path)
            r.set('read_list', ','.join(read_list))
    finally:
        db.close()