from splinter import Browser
import bs4
import time,io,sqlite3,datetime,math
## Automated browser driver: log in and open the filtered query page.
def autoBrowser():
    """Launch a headless Chrome via splinter, log in to the AI portal,
    open the requirement-management platform and apply fixed search
    filters (domain "B域", type "省分自建"), then trigger the search.

    Returns:
        The splinter Browser positioned on the query-result grid.

    NOTE(review): credentials are hard-coded below — move them to a
    config file or environment variables before sharing this script.
    """
    #browser = Browser("chrome")
    browser = Browser("chrome",headless=True)
    url = "http://aiportal.unicom.local/modules/login/login.html"
    browser.visit(url)
    #time.sleep(1)
    browser.fill("login","zhaoweiming")
    browser.fill("password","Zhaowm$314")
    #time.sleep(1)
    # Click the "登录" (Login) button.
    browser.find_by_text("登录").click()
    print("登录成功！")
    # Jump straight to the demand-management platform (token in URL).
    browser.visit("http://10.249.217.120/uflow/unicomDM.jsp?check_login=4a&token_id=emhhb3dlaW1pbmc=")
    print("打开需求统一管理平台！")
    # Navigate: "综合查询" (comprehensive query) -> "本省需求查询"
    # (province demand query).
    browser.find_by_text("综合查询").click()
    #browser.find_by_text("导出").click()
    browser.find_by_text("本省需求查询").click()
    time.sleep(2)
    # Apply filters: domain = "B域", demand type = "省分自建".
    browser.find_by_name('domain').click()
    browser.find_by_text("B域").click()
    browser.find_by_name('xqlx').click()
    browser.find_by_text("省分自建").click()
    #browser.find_by_name('status').click()
    #browser.find_by_text("评估").click()
    #browser.find_by_text("确认").click()
    # Run the advanced search to populate the result grid.
    browser.find_by_id("advancedSearch_btn").click()
    return browser
# Fetch one requirement's detail page and upsert it into order_detail.
def orderDetail(orderId):
    """Open the detail popup for *orderId*, scrape its <dt>/<dd> field
    pairs, and insert (new order) or update (existing order) the
    matching row in the ``order_detail`` table.

    Uses the module-level ``browser`` (splinter) and ``conn`` (sqlite3).
    Returns None; silently bails out if the page exposes no
    '需求编号：' field (detail failed to load).
    """
    # Open the detail page; it appears as a second browser window.
    browser.find_by_text(orderId).click()
    window = browser.windows[1]
    window.is_current = True
    soup = bs4.BeautifulSoup(browser.html, "html5lib")

    # The detail panel is <div class="record details"> holding paired
    # <dt>label</dt><dd>value</dd> elements — zip them into a dict.
    panel = soup.find_all('div', class_='record details')[0]
    fields = {dt.get_text(): dd.get_text()
              for dt, dd in zip(panel.find_all('dt'), panel.find_all('dd'))}

    xqbh = fields.get('需求编号：')
    if xqbh is None:
        # No order id on the page: close the popup before giving up
        # (the original leaked the window here).
        window.close()
        browser.windows[0].is_current = True
        return
    xqbt = fields.get('需求标题：', "")      # title
    xqgs = fields.get('需求概述：', "")      # summary
    jslx = fields.get('建设类型：', "")      # build type
    xqly = fields.get('需求领域：', "")      # domain
    xqtcsj = fields.get('需求提出时间：', "")  # proposed time
    qwsxsj = fields.get('期望上线时间：', "")  # expected go-live
    xqtcr = fields.get('需求提出人：', "")    # proposer
    jhsxsj = fields.get('计划上线时间：', "")  # planned go-live
    sjsxsj = fields.get('实际上线时间：', "")  # actual go-live
    pggs = fields.get('评估工时（人天）：', "")  # estimated workload
    sjgs = fields.get('实际工时（人天）：', "")  # actual workload
    xqjl = fields.get('需求经理：', "")      # demand manager
    xqbm = fields.get('所属单位：', "")      # owning unit

    # Upsert into the local DB. Parameterized queries replace the
    # original string-formatted SQL (which was also broken: the INSERT
    # listed 15 columns but only 11 placeholders, and the UPDATE put
    # orderId into the plan_up_time slot).
    cur2 = conn.cursor()
    try:
        cur2.execute("select count(*) from order_detail where order_id=?",
                     (xqbh,))
        isHave = cur2.fetchone()[0]
        if isHave == 0:
            cur2.execute(
                "insert into order_detail(order_id, order_name, order_detail, "
                "order_type, order_fields, create_time, Expect_time, "
                "create_staff, create_depart, order_manager, order_depart, "
                "plan_up_time, real_up_time, plan_Workload, real_workload) "
                "values(?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)",
                (xqbh, xqbt, xqgs, jslx, xqly, xqtcsj, qwsxsj,
                 xqtcr, "", xqjl, xqbm, jhsxsj, sjsxsj, pggs, sjgs))
        else:
            cur2.execute(
                "update order_detail set order_manager=?, order_depart=?, "
                "plan_up_time=?, real_up_time=?, plan_Workload=?, "
                "real_workload=? where order_id=?",
                (xqjl, xqbm, jhsxsj, sjsxsj, pggs, sjgs, orderId))
        conn.commit()
    finally:
        cur2.close()
    # Close the detail popup and return focus to the result list.
    window.close()
    browser.windows[0].is_current = True
## Parse the result-grid rows and persist them into the DB.
def parsTable(table):
    """For each <tr> in *table*, extract order id / name / create time
    / status, insert a new row into ``xq_order_t`` if unseen, or update
    its status when it changed; in either case also refresh the detail
    record via :func:`orderDetail`.

    Rows without the expected <span> cells (headers, spacers) are
    skipped. Uses the module-level ``conn`` (sqlite3).
    """
    for rec in table:
        # Only the cell extraction is guarded: header/spacer rows raise
        # AttributeError here. (The original wrapped the DB work too,
        # silently masking real database errors.)
        try:
            orderId = rec.contents[0].span.get_text()                      # 需求编号
            orderName = rec.contents[1].span.get_text()                    # 需求名称
            createTime = rec.contents[5].span.get_text().replace('   ', ' ')  # 创建时间
            status = rec.contents[8].span.get_text()                       # 需求状态
        except AttributeError:
            continue
        print("需求编号：" + orderId + ",需求名称：" + orderName +
              ",需求提出时间：" + createTime + ",状态：" + status)
        #file.write(strline+"\n")
        timestamp = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        cur = conn.cursor()
        try:
            # Parameterized query instead of string-formatted SQL.
            cur.execute("select count(*),status from xq_order_t "
                        "where order_id=? group by status", (orderId,))
            isHave = 0
            dbStatus = "未知"
            for row in cur.fetchall():
                print(row[0], row[1], "->", status)
                isHave = int(row[0])
                dbStatus = row[1]
            if isHave == 0:
                # Unseen order: insert a fresh tracking row.
                cur.execute("insert into main.xq_order_t(order_id, order_name, "
                            "create_time, status, update_time) values(?,?,?,?,?)",
                            (orderId, orderName, createTime, status, timestamp))
                conn.commit()
            elif dbStatus != status:
                # Known order whose status moved on: record the change.
                cur.execute("update main.xq_order_t set status=?, update_time=? "
                            "where order_id=?", (status, timestamp, orderId))
                conn.commit()
        finally:
            # Always release the cursor (the original leaked it on errors).
            cur.close()
        if isHave == 0 or dbStatus != status:
            # New or changed orders also get their detail page scraped.
            orderDetail(orderId)

# ---- main script: crawl every result page and persist it ----
# Raw string: "database\data" only worked because "\d" is not a valid
# escape sequence; make the literal backslash explicit.
conn = sqlite3.connect(r"database\data", check_same_thread=False)
browser = autoBrowser()
print("查询需求列表！")
time.sleep(2)
soup = bs4.BeautifulSoup(browser.html, "html5lib")
# Total record count is shown as "共 N 条数据" inside the grid's <em>.
count_text = soup.find_all(id="allQuery_grid")[0].find_all("em")[0].get_text()
rowCnt = int(count_text.replace("共", "").replace("条数据", ""))
# The grid shows a fixed number of rows per page.
ROWS_PER_PAGE = 13
pageCnt = math.ceil(rowCnt / ROWS_PER_PAGE)
for page in range(1, pageCnt + 1):
    time.sleep(1)
    soup = bs4.BeautifulSoup(browser.html, "html5lib")
    rows = soup.find_all("table")[0].find_all(["tr"])
    print("处理列表")
    parsTable(rows)
    if page < pageCnt:
        # Only advance when another page exists — the original clicked
        # "下一页" (next page) even after the last page.
        time.sleep(2)
        print("切换下一页")
        browser.find_by_text("下一页").click()
conn.close()