# coding=utf-8
import redis
import re
import random
import time as t
import random
import os
import json
import requests


from concurrent.futures import ThreadPoolExecutor
from multiprocessing import Pool
import os


# def startCrawl():
#     """
#     多线程执行
#     """
#     p = Pool(processes=2)  # 使用两个进程同时爬取两个项目
#     p.map(crawlPart, (4, 6))


def main(projectId):
    """
    进程池调用的函数
    """
    # upload('point','')
    # 每个进行使用10个worker同时爬取多个监测项目
    monittyleList4 = [19, 18, 20, 22, 26]
    monittyleList6 = [34, 48, 38, 11, 41, 40,
                      50,  37, 31, 24, 33, 28, 30, 9, 8, 43]

    redisDataBase = redis.StrictRedis(host='localhost', port=6379, db=0)
    rawTimelist4 = redisDataBase.get("TotalTimes_Cache_proj4")
    timeList4 = json.loads(rawTimelist4)
    rawTimelist6 = redisDataBase.get("TotalTimes_Cache_proj6")
    timeList6 = json.loads(rawTimelist6)

    monittyleList = []  # 需要爬取的项目列表
    timelist = []
    res_list = []
    if projectId == 4:
        monittyleList = monittyleList4
        timelist = timeList4
    else:
        monittyleList = monittyleList6
        timelist = timeList6
    # 一次性爬取多个项目，按监控日期进行
    for i in timelist:
        res = tp.submit(crawlCommonData, projectId,
                        monittyleList, i["MonitorTime"])
        res_list.append(res)

    # 以下循环是multiprocessing.Pool情况下再嵌套concurrent.futures.ThreadPoolExecutor时必须的,
    # 否则子进程不会等待多线程执行完
    for j in res_list:
        '''在子进程调用main执行完毕之前
        通过调用future.result()循环阻塞每个子线程。
        只是这里第一次阻塞的时间里，其他线程基本上也就完成了(相同的)任务，
        所以这里可以认为只有一次sleep_函数的执行时间'''
        j.result()
    # 子进程最后会执行该输出
    print(str(projectId)+"执行成功")


def crawlSummary(projectId=6, time=420):
    """
    Crawl the alarm-data summary (monitored items plus warning thresholds)
    for one project and monitor time, cache the raw body in Redis and push
    it upstream via upload().

    :param projectId: project id (default 6)
    :param time: monitor-time index on the source site (default 420)
    """
    requestHeaders = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.105 Safari/537.36 Edg/84.0.522.52',
        'Accept': 'application/json, text/javascript, */*; q=0.01',
        'Accept-Encoding': 'gzip, deflate',
        'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6',
        'cookie': cookie,
        'Connection': 'keep-alive'
    }
    response = requests.get(
        url=baseUrl + "/DataReport/GetMonitoringItemSummary",
        params={"pid": projectId, "time": time},
        headers=requestHeaders)
    body = response.text
    cacheKey = "summary_" + str(time) + "_proj" + str(projectId)
    redisDataBase.set(cacheKey, body)
    print(cacheKey)
    upload('summary', body)
    print(cacheKey + "     uploaded")
    

def upload(type, data):
    """
    Push a raw JSON payload to the remote data-collection API.

    :param type: dataset kind — 'point' (monitor point values),
                 'summary' (monitoring briefs) or 'holes' (borehole data);
                 anything else leaves url empty and the POST will fail
    :param data: the JSON string exactly as received from the source site
    """
    # data=[{"ID":"","Version":"123","MonitorTime":"","MonitorDate":"","CurrentValue":"","CurrentVariance":"","TotalVariance":"","TotalWarningState":"","DailyVariance":"","DailyWarningState":"","MonitorPointEvent":"","IntervalDays":"","DataComponentType":"","LastestUpdateTime":"","PitProjectID":"","MonitorProjectID":"","MonitorPointID":"","PointName":"","Code":"","PointDepth":"","PointAlarmRank":"","Note":"","MonitorDateYear":"","MonitorDateMonth":"","MonitorDateDay":"","HoleElevation":"","InitialValue":"123","FILE_FSET":""}]
    # 1. monitor point values  2. monitoring summary  3. borehole data
    urlList=['http://47.101.219.119:8099/api/mainserver/system/MPDA/create?prj=shensui&datasetid=32389bde4e474ad98c41d5753cac9cd0','http://47.101.219.119:8099/api/mainserver/system/MSUM/create?prj=shensui&datasetid=03669d30686f48a4a039c0117ca715f6','http://47.101.219.119:8099/api/mainserver/system/BVDM/create?prj=shensui&datasetid=95ed29b9134546348be169fd5d81bf74']

    url = ''
    if type == 'point':
        url = urlList[0]
    elif type == 'summary':
        url = urlList[1]
    elif type == 'holes':
        url = urlList[2]

    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.125 Safari/537.36 Edg/84.0.522.59',
        'Accept': 'application/json, text/plain, */*',
        'Accept-Encoding': 'gzip, deflate',
        'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6',
        'Connection': 'keep-alive',
        'Content-Type': 'application/json;charset=UTF-8',
        'Host': '47.101.219.119:8099',
        'Origin': 'http://tue2.is3.org.cn'
    }
    # BUG FIX: the original called json.load(data) on a str (json.load expects
    # a file object, so this raised AttributeError) and then .encode() on the
    # decoded object. `data` is already a JSON string, so send it as-is, and
    # actually send the headers that were built but never passed to the POST.
    result = requests.post(url=url, data=data.encode("utf-8"), headers=headers)
    print(result)

def crawlCommonData(projectId, list, time):
    """
    Crawl point data for every monitor type in `list` for one monitor time,
    cache each raw response in Redis under
    "<typeName>_<time>_proj<projectId>" and push it upstream via upload().
    For project 4 an additional borehole-data request is made.

    :param projectId: project id (4 or 6)
    :param list: monitor-type ids to fetch (name kept for caller
                 compatibility, although it shadows the builtin)
    :param time: monitor-time index on the source site
    """
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.105 Safari/537.36 Edg/84.0.522.52',
        'Accept': 'text/plain, */*; q=0.01',
        'Accept-Encoding': 'gzip, deflate',
        'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6',
        'cookie': cookie,
        'Connection': 'keep-alive'
    }
    url = baseUrl+"/DataReport/GetPrjPointDatas"
    # Per-call Redis connection: this runs in worker threads/processes.
    redisDataBase = redis.StrictRedis(host='localhost', port=6379, db=0)
    # crawlSummary(projectId, time)
    for Monitortype in list:
        params = {
            "pid": projectId,
            "time": time,
            "prjID": Monitortype
        }
        requestResult = requests.get(url=url, params=params, headers=headers)
        rawContent = requestResult.text
        key = MonitTypeDefinition[str(Monitortype)] + \
            "_"+str(time)+"_proj"+str(projectId)
        redisDataBase.set(key, rawContent)
        print(key)
        upload('point', rawContent)
        print(key+"     uploaded")
    if projectId == 4:
        # Borehole (deep soil) data, only fetched for project 4.
        url = baseUrl+"/DataReport/GetPrjHoleDatas"
        params = {
            "pid": projectId,
            "time": time,
            "prjID": 10
        }
        # BUG FIX: the original never issued this request — it reused the
        # response (and cache-key prefix) left over from the last loop
        # iteration. Issue the hole-data request and key it under the
        # prefix for monitor type 10.
        requestResult = requests.get(url=url, params=params, headers=headers)
        rawContent = requestResult.text
        key = MonitTypeDefinition["10"] + \
            "_"+str(time)+"_proj"+str(projectId)
        redisDataBase.set(key, rawContent)
        print(key)
        upload('holes', rawContent)
        print(key+"    uploaded")
        


def crawlBoreholeData(Monitortype=10, projectId=6, time=420):
    """
    Crawl borehole (deep soil) data for one monitor time and cache the raw
    response in Redis under "<typeName>_<time>_proj<projectId>".

    :param Monitortype: monitor-type id used only to pick the cache-key
                        prefix from MonitTypeDefinition (default 10)
    :param projectId: project id (default 6)
    :param time: monitor-time index on the source site (default 420)
    """
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.105 Safari/537.36 Edg/84.0.522.52',
        'Accept': 'text/plain, */*; q=0.01',
        'Accept-Encoding': 'gzip, deflate',
        'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6',
        'cookie': cookie,
        'Connection': 'keep-alive'
    }
    # FIX: the original URL contained a doubled slash
    # ("/DataReport//GetPrjHoleDatas"); use the same path that
    # crawlCommonData requests.
    url = baseUrl+"/DataReport/GetPrjHoleDatas"
    params = {
        "pid": projectId,
        "time": time,
        "prjID": 10
    }
    requestResult = requests.get(url=url, params=params, headers=headers)
    rawContent = requestResult.text
    key = MonitTypeDefinition[str(Monitortype)] + \
        "_"+str(time)+"_proj"+str(projectId)
    redisDataBase.set(key, rawContent)


def crawlPipeHorizental(projectId=6, time=420):
    """
    Crawl horizontal displacement data for underground pipelines
    (monitor type 19) and cache the raw response body in Redis.

    :param projectId: project id (default 6)
    :param time: monitor-time index on the source site (default 420)
    """
    requestHeaders = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.105 Safari/537.36 Edg/84.0.522.52',
        'Accept': 'text/plain, */*; q=0.01',
        'Accept-Encoding': 'gzip, deflate',
        'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6',
        'cookie': cookie,
        'Connection': 'keep-alive'
    }
    queryParams = {"pid": projectId, "time": time, "prjID": 19}
    response = requests.get(url=baseUrl + "/DataReport/GetPrjPointDatas",
                            params=queryParams, headers=requestHeaders)
    cacheKey = "HorizonDisOfPipe_" + str(time) + "_proj" + str(projectId)
    redisDataBase.set(cacheKey, response.text)


def crawlPipeVertical(projectId=6, time=420):
    """
    Crawl vertical displacement data for underground pipelines and cache
    the raw response body in Redis.

    :param projectId: project id (default 6)
    :param time: monitor-time index on the source site (default 420)
    """
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.105 Safari/537.36 Edg/84.0.522.52',
        'Accept': 'text/plain, */*; q=0.01',
        'Accept-Encoding': 'gzip, deflate',
        'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6',
        'cookie': cookie,
        'Connection': 'keep-alive'
    }
    url = baseUrl+"/DataReport/GetPrjPointDatas"
    params = {
        "pid": projectId,
        "time": time,
        # BUG FIX: the original requested prjID 20, which MonitTypeDefinition
        # (and crawlBuildingVertical) map to building vertical displacement.
        # Pipeline vertical displacement is monitor type 18, matching the
        # "VerticalDisOfPipe" cache key written below.
        "prjID": 18
    }
    requestResult = requests.get(url=url, params=params, headers=headers)
    rawContent = requestResult.text
    key = "VerticalDisOfPipe_"+str(time)+"_proj"+str(projectId)
    redisDataBase.set(key, rawContent)


def crawlBuildingVertical(projectId=6, time=420):
    """
    Crawl vertical displacement data for buildings (monitor type 20) and
    cache the raw response body in Redis.

    :param projectId: project id (default 6)
    :param time: monitor-time index on the source site (default 420)
    """
    requestHeaders = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.105 Safari/537.36 Edg/84.0.522.52',
        'Accept': 'text/plain, */*; q=0.01',
        'Accept-Encoding': 'gzip, deflate',
        'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6',
        'cookie': cookie,
        'Connection': 'keep-alive'
    }
    queryParams = {"pid": projectId, "time": time, "prjID": 20}
    response = requests.get(url=baseUrl + "/DataReport/GetPrjPointDatas",
                            params=queryParams, headers=requestHeaders)
    cacheKey = "VerticalDisOfBuilding_" + str(time) + "_proj" + str(projectId)
    redisDataBase.set(cacheKey, response.text)


def crawlGroundVertical(projectId=6, time=420):
    """
    Crawl vertical displacement data for ground-surface profiles
    (monitor type 26) and cache the raw response body in Redis.

    :param projectId: project id (default 6)
    :param time: monitor-time index on the source site (default 420)
    """
    requestHeaders = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.105 Safari/537.36 Edg/84.0.522.52',
        'Accept': 'text/plain, */*; q=0.01',
        'Accept-Encoding': 'gzip, deflate',
        'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6',
        'cookie': cookie,
        'Connection': 'keep-alive'
    }
    queryParams = {"pid": projectId, "time": time, "prjID": 26}
    response = requests.get(url=baseUrl + "/DataReport/GetPrjPointDatas",
                            params=queryParams, headers=requestHeaders)
    cacheKey = "VerticalDisOfGround_" + str(time) + "_proj" + str(projectId)
    redisDataBase.set(cacheKey, response.text)


def getTotalTimes(projectId=6):
    """
    Fetch the full list of monitor times (manual monitoring) for a project,
    cache the raw JSON text in Redis and return the parsed result.

    :param projectId: project id (default 6)
    :return: parsed JSON body of the GetMonitorTimes endpoint
    """
    requestHeaders = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.105 Safari/537.36 Edg/84.0.522.52',
        'Accept': 'text/plain, */*; q=0.01',
        'Accept-Encoding': 'gzip, deflate',
        'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6',
        'cookie': cookie,
        'Connection': 'keep-alive'
    }
    response = requests.get(url=baseUrl + "/DataReport/GetMonitorTimes",
                            params={"pid": projectId}, headers=requestHeaders)
    cacheKey = "TotalTimes_Cache_proj" + str(projectId)
    redisDataBase.set(cacheKey, response.text)
    return response.json()


def getTotalTimes4Auto(projectId=6):
    """
    Fetch the full list of monitor times (automated monitoring) for a
    project, cache the raw JSON text in Redis and return the parsed result.

    NOTE(review): the cache key is identical to the one written by
    getTotalTimes, so the two functions overwrite each other's cache —
    confirm this is intended.

    :param projectId: project id (default 6)
    :return: parsed JSON body of the Monitoring/GetMonitorTimes endpoint
    """
    requestHeaders = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.105 Safari/537.36 Edg/84.0.522.52',
        'Accept': 'text/plain, */*; q=0.01',
        'Accept-Encoding': 'gzip, deflate',
        'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6',
        'cookie': cookie,
        'Connection': 'keep-alive'
    }
    response = requests.get(url=baseUrl + "/Monitoring/GetMonitorTimes",
                            params={"pid": projectId}, headers=requestHeaders)
    cacheKey = "TotalTimes_Cache_proj" + str(projectId)
    redisDataBase.set(cacheKey, response.text)
    return response.json()


def showTotalTimes(projectId=6):
    """
    Print the newest cached monitor entry next to the last one already
    crawled, and return both time indices.

    :param projectId: project id (default 6)
    :return: [last crawled MonitorTime, newest MonitorTime]
    """
    latest = json.loads(
        redisDataBase.get("TotalTimes_Cache_proj" + str(projectId)))
    newestTime = latest[0]["MonitorTime"]
    newestDate = latest[0]["MonitorDateStr"]

    history = json.loads(
        redisDataBase.get("TotalTimes_HistoricalCache_proj" + str(projectId)))
    crawledTime = history[0]["MonitorTime"]
    crawledDate = history[0]["MonitorDateStr"]

    print("已爬取至" + crawledDate, "  对应编号为" + str(crawledTime))
    print("最新监测数据为" + newestDate, "  对应编号为" + str(newestTime))
    return [crawledTime, newestTime]


def getTime4Crawl(projectId):
    """
    Return the monitor-time range still to be crawled for a project.

    Same data as showTotalTimes but without the console output.

    :param projectId: project id
    :return: [last crawled MonitorTime, newest MonitorTime]
    """
    # Newest cached monitor-time list.
    latest = json.loads(
        redisDataBase.get("TotalTimes_Cache_proj" + str(projectId)))
    maxtime = latest[0]["MonitorTime"]

    # Snapshot of the list at the time of the last completed crawl.
    historical = json.loads(
        redisDataBase.get("TotalTimes_HistoricalCache_proj" + str(projectId)))
    oldmaxtime = historical[0]["MonitorTime"]
    # Cleanup: the original also extracted the MonitorDateStr fields into
    # locals that were never used.
    return [oldmaxtime, maxtime]


def globalVariance():
    """
    Initialize the module-level globals used by every crawler:
    session cookie, Redis connection, source-site base URL and the
    monitor-type-id -> cache-key-prefix mapping.
    """
    global cookie
    print("输入从浏览器获得的cookie")
    # cookie=input()
    cookie = "ss_TongJi-projectId=6; _shensui=F30063E61731D1158DAC2ADD740AF58F3E27E963A3592C89A4CA02B9911E37D5EC67D6658FDFE308E4A3B575A679DFCB05FEBFE956746617746584E1F0267B4E5830B39C719B4FB64E65C6D78CE0737F5288109451DAA7B216B0E6E39BF8DE410452A218EE773FF8D46F097FD7E395F9C53C84E39564E7D4732876C9AD5B97872B1E53434A7F659FA1F2854E3678B8ED"

    global redisDataBase
    # In-memory database used as a cache so everything can be stored in
    # one pass later.
    redisDataBase = redis.StrictRedis(host='localhost', port=6379, db=0)

    global baseUrl
    baseUrl = "http://47.97.46.65:8833"

    global MonitTypeDefinition
    MonitTypeDefinition = {
        "19": "HorizonDisOfPipe",
        "26": "VerticalDisOfGround",
        "20": "VerticalDisOfBuilding",
        "18": "VerticalDisOfPipe",
        "22": "VerticalDisOfWall",
        "34": "VerticalDisOfYunling",      # Yunling water-tank, vertical
        "48": "HorizonDisOfYunling",       # Yunling building, horizontal
        "38": "VerticalDisOfLayer",        # layered vertical displacement
        "11": "VerticalDisOfGround",       # ground-surface profile, vertical
        "41": "HorizonDisOfSupportTop",    # support-wall top, horizontal
        "40": "VerticalDisOfSupportTop",   # support-wall top, vertical
        "50": "RoundnessOfShaft",          # shaft roundness
        "10": "HorizonDisOfOutGround",     # deep soil outside pit, horizontal
        "37": "DepthOfOutPresWater",       # confined water level outside pit
        "31": "DepthOfOutDiviWater",       # phreatic water outside pit
        "24": "VerticalDisOfNewFloodwall",  # new floodwall, vertical
        "33": "VerticalDisOfBridgepier",   # bridge pier, vertical
        "28": "VerticalDisOfPumpstation",  # pump-station building, vertical
        "30": "VerticalDisOfPumppipe",     # pump-station pipe-jacking, vertical
        "9": "HorizonDisOfFloodwall",      # floodwall, horizontal
        "8": "VerticalDisOfFloodwall",     # floodwall, vertical
        "43": "HorizonDisOfDeepSupport"    # deep support wall, horizontal
    }


def main1():
    """
    Interactive entry point for manual-monitoring crawls: initialize the
    globals, refresh the cached monitor-time list for both projects, then
    wait for the user to press Enter before starting the crawl.
    """
    globalVariance()
    # Manual monitoring. Project list endpoint:
    # http://47.97.46.65:8833/WorkSystem/GetProjectList
    # id 4: Yunling;  id 6: Miaopu (nursery)
    projList = [4, 6]
    for proj in projList:
        getTotalTimes(proj)
        # showTotalTimes(proj)
    print("按回车开始爬取最新数据")
    # Block until the user presses Enter. (Cleanup: the original bound the
    # returned value to an unused local.)
    input()
    # startCrawl()
    # Monitoring/GetPrjPointDatas?pid=51
    # /Monitoring/GetBoreHoleCurve?holeID=51&time=2020-08-08+16%3A00%3A00


def main4AutoMonitor():
    """
    Entry point intended for automated-monitoring crawls: initialize the
    globals, refresh and display the monitor-time lists for both projects,
    then wait for the user before starting the crawl.
    """
    globalVariance()
    # Auto sub-project list endpoint:
    # http://47.97.46.65:8833/Monitoring/GetAutoSubProjects
    # id 12: Yunling;  id 11: Miaopu (nursery)
    projList = [4, 6]
    for proj in projList:
        getTotalTimes(proj)
        showTotalTimes(proj)
    print("开始爬取最新数据")
    input()
    # startCrawl()


def test():
    """Ad-hoc entry point used during development: only sets up the globals."""
    globalVariance()
    # startCrawl()


# Module-level setup: these globals are consumed by every crawler function
# above, and run in each worker process on import.
max_workers = 10
# Shared thread pool used by main() to fan out per-date crawls.
tp = ThreadPoolExecutor(max_workers=max_workers)
print("输入从浏览器获得的cookie")
# cookie=input()
# Session cookie copied from a logged-in browser session.
cookie = "ss_TongJi-projectId=6; _shensui=F30063E61731D1158DAC2ADD740AF58F3E27E963A3592C89A4CA02B9911E37D5EC67D6658FDFE308E4A3B575A679DFCB05FEBFE956746617746584E1F0267B4E5830B39C719B4FB64E65C6D78CE0737F5288109451DAA7B216B0E6E39BF8DE410452A218EE773FF8D46F097FD7E395F9C53C84E39564E7D4732876C9AD5B97872B1E53434A7F659FA1F2854E3678B8ED"

redisDataBase = redis.StrictRedis(
    host='localhost', port=6379, db=0)  # in-memory cache so data can be stored in one pass later

baseUrl = "http://47.97.46.65:8833"
# Monitor-type id -> cache-key prefix (duplicated in globalVariance()).
MonitTypeDefinition = {
    "19": "HorizonDisOfPipe",
    "26": "VerticalDisOfGround",
    "20": "VerticalDisOfBuilding",
    "18": "VerticalDisOfPipe",
    "22": "VerticalDisOfWall",
    "34": "VerticalDisOfYunling",  # Yunling water-tank, vertical
    "48": "HorizonDisOfYunling",  # Yunling building, horizontal
    "38": "VerticalDisOfLayer",  # layered vertical displacement
    "11": "VerticalDisOfGround",  # ground-surface profile, vertical
    "41": "HorizonDisOfSupportTop",  # support-wall top, horizontal
    "40": "VerticalDisOfSupportTop",  # support-wall top, vertical
    "50": "RoundnessOfShaft",  # shaft roundness
    "10": "HorizonDisOfOutGround",  # deep soil outside pit, horizontal
    "37": "DepthOfOutPresWater",  # confined water level outside pit
    "31": "DepthOfOutDiviWater",  # phreatic water outside pit
    "24": "VerticalDisOfNewFloodwall",  # new floodwall, vertical
    "33": "VerticalDisOfBridgepier",  # bridge pier, vertical
    "28": "VerticalDisOfPumpstation",  # pump-station building, vertical
    "30": "VerticalDisOfPumppipe",  # pump-station pipe-jacking, vertical
    "9": "HorizonDisOfFloodwall",  # floodwall, horizontal
    "8": "VerticalDisOfFloodwall",  # floodwall, vertical
    "43": "HorizonDisOfDeepSupport"  # deep support wall, horizontal
}
# main1()
# crawlSummary(6,410)
if __name__ == '__main__':
    # Two worker processes, one per project (4: Yunling, 6: Miaopu).
    p = Pool(processes=2)
    p.map(main, (4, 6))
