import os

import requests
import time
from sqlalchemy import create_engine
from zipfile import ZipFile
import pandas as pd
import random





def login():
    """Log in to the ECCANG ERP and return the session cookie string.

    Returns:
        str: cookies joined as 'name=value;name=value;...' on success,
        or None (implicitly) if the login request raised.
    """
    login_url = 'http://sw.eccang.com/default/index/login'
    headers = {
        "Accept": "application/json, text/javascript, */*; q=0.01",
        "Accept-Encoding": "gzip, deflate",
        "Accept-Language": "zh-CN,zh;q=0.9",
        "Cache-Control": "no-cache",
        "Connection": "keep-alive",
        # The hard-coded "Content-Length: 56" header was removed: it did not
        # match the actual form body; requests computes the length itself.
        "Content-Type": "application/x-www-form-urlencoded; charset=UTF-8",
        "Host": "sw.eccang.com",
        "Origin": "http://sw.eccang.com",
        "Pragma": "no-cache",
        "Referer": "http://sw.eccang.com/default/index/logout?company_code=sw",
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4455.2 Safari/537.36",
        "X-Requested-With": "XMLHttpRequest",
    }
    # NOTE(review): credentials are hard-coded in source; consider moving
    # them to environment variables or a config file.
    body = {
        "userName": "huangzhengchao",
        "userPass": "hzc@123456",
        "companyCode": ""
    }
    try:
        res = requests.post(url=login_url, headers=headers, data=body)
        # Fold the response cookies into a single Cookie-header value.
        cookie = ''.join('{0}={1};'.format(name, value)
                         for name, value in res.cookies.items())
        print('UM_distinctid=17c15b16b7275-02b55430879ff9-2343360-1fa400-17c15b16b7376d;' + cookie)
        return cookie
    except Exception as err:
        # Best-effort: log the failure; caller receives None.
        print('获取cookie失败：\n{0}'.format(err))



# Shared SQLAlchemy engine for the sunway_db MySQL database.
# NOTE(review): DB credentials are hard-coded here — consider env/config.
engine = create_engine('mysql+pymysql://jinhuitao:jinhuitao_123456@192.168.2.205:3306/sunway_db')


def time_rdf_id(Cookies):
    """Find today's finished STOCK_DATA report and trigger its download.

    Polls the report-center list endpoint, and for the first entry whose
    finish date equals today's date, hands its rdf_id to requests_zip1().

    Args:
        Cookies: session cookie string produced by login().
    """
    url = 'http://sw.eccang.com/statistics/customer-report-center/stockdata-module-report-list/page/1/pageSize/20'
    headers = {
        'Cookie': '{}'.format(Cookies),
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/92.0.4515.107 Safari/537.36',
    }
    data = {
        'code': 'STOCK_DATA',
        'report_id': '263',
        'page': '',
        'pageSize': '',
        'is_export_desc': '0',
        'platform': '',
        'account': '',
        'searchCode': '',
        'product_barcode': '',
        'category[]': '',
        'operationUserType': 'buyer_id',
        'person': '',
        'searchDateType': 'product_add_time',
        'dateFor': '',
        'dateTo': '',
    }
    today = time.strftime('%Y-%m-%d')
    res = requests.post(url, headers=headers, data=data).json()
    print(res)
    for rdf in res["data"]:
        # rd_finish_time is assumed to be 'YYYY-MM-DD HH:MM:SS' — compare
        # only the date part. (TODO confirm the format against the API.)
        if rdf["rd_finish_time"].split(' ')[0] == today:
            requests_zip1(rdf["rdf_id"], Cookies)
            break


def requests_zip1(rdf, Cookies):
    """Request the server-side zip bundle for a report and process it.

    Posts the rdf id to the down-zip endpoint without following the
    redirect, passes the redirect target to requests_zip2() for the actual
    download/import, then deletes the local zip file.

    Args:
        rdf: the report's rdf_id.
        Cookies: session cookie string produced by login().
    """
    download_url = 'http://sw.eccang.com/common/report-data/down-zip'
    request_headers = {
        'Host': 'sw.eccang.com',
        'Connection': 'keep-alive',
        'Cache-Control': 'max-age=0',
        'Upgrade-Insecure-Requests': '1',
        'Origin': 'null',
        'Content-Type': 'application/x-www-form-urlencoded',
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/95.0.4638.69 Safari/537.36',
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9',
        'Accept-Encoding': 'gzip, deflate',
        'Accept-Language': 'zh-CN,zh;q=0.9',
        'Cookie': '{}'.format(Cookies),
    }
    payload = "rdf_ids[]={}".format(rdf)
    # allow_redirects=False: we want the 30x Location header, not its target.
    response = requests.post(url=download_url, headers=request_headers,
                             data=payload, verify=False, allow_redirects=False)
    requests_zip2(response.headers["Location"], rdf)
    dele(rdf)


def requests_zip2(rep_Location, rdf):
    """Download the report zip and load its CSV into MySQL.

    Fetches the zip from the (OSS) redirect location, saves it as
    '<rdf>.zip', extracts 'swSTOCK_DATA_<rdf>.csv' and appends its rows to
    the 'swSTOCK_DATA_nice' table via the module-level engine.

    Args:
        rep_Location: the redirect URL returned by the down-zip endpoint.
        rdf: the report's rdf_id (used for local/remote file names).
    """
    zip_path = f'{rdf}.zip'
    res = requests.get(rep_Location, verify=False)
    # Fail loudly on HTTP errors instead of writing an error page into the
    # zip file and crashing later with BadZipFile.
    res.raise_for_status()
    # Close the write handle before reading: the original opened ZipFile
    # while the file was still open for (buffered) writing.
    with open(zip_path, mode='wb') as g:
        g.write(res.content)
    with ZipFile(zip_path) as myzip:
        with myzip.open('swSTOCK_DATA_{}.csv'.format(rdf)) as f:
            print('1111111111', rdf)
            # header=3: the real column row is the 4th line of the CSV.
            df = pd.read_csv(f, header=3)
    # if_exists='append' adds rows to the existing table.
    df.to_sql('swSTOCK_DATA_nice', engine, index=True, if_exists='append')
    print('任务执行完毕！！！')


def shengcheng(Cookies):
    """Ask the ERP to generate the STOCK_DATA report (report_id 263).

    Fire-and-forget POST; the report is produced asynchronously server-side
    and later picked up by time_rdf_id().

    Args:
        Cookies: session cookie string produced by login().
    """
    headers = {
        # The original 'Content - Length' / 'Content - Type' keys contained
        # spaces, which makes them invalid HTTP header names (urllib3
        # rejects such names). They are dropped: requests sets the correct
        # Content-Type and Content-Length for form data automatically.
        'Host': 'sw.eccang.com',
        'Origin': 'http://sw.eccang.com',
        'Proxy-Connection': 'keep-alive',
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/92.0.4515.107 Safari/537.36',
        'X-Requested-With': 'XMLHttpRequest',
        'cookie': '{}'.format(Cookies),
    }
    data = {
        'code': 'STOCK_DATA',
        'report_id': '263',
        'page': '1',
        'pageSize': '20',
        'is_export_desc': '0',
        'platform': '',
        'account': '',
        'searchCode': '',
        'product_barcode': '',
        'category[]': '',
        'operationUserType': 'buyer_id',
        'person': '',
        'searchDateType': 'product_add_time',
        'dateFor': '',
        'dateTo': ''
    }
    url = 'http://sw.eccang.com/statistics/customer-report-center/generate-report-for-stockdata'
    requests.post(url, headers=headers, data=data)


def dele(rdf):
    """Delete the downloaded '<rdf>.zip' from the working directory."""
    os.remove('{}.zip'.format(rdf))


def run():
    """One full cycle: log in, trigger report generation, wait, then fetch."""
    session_cookie = login()
    shengcheng(session_cookie)
    # The report is generated asynchronously server-side; wait 40 minutes
    # before polling for the finished file.
    time.sleep(2400)
    time_rdf_id(session_cookie)


from apscheduler.schedulers.blocking import BlockingScheduler
from datetime import datetime
import time


# 输出时间
def job():
    """Poll the wall clock forever and start run() each day at 08:30.

    run() itself sleeps for 40 minutes, so it cannot fire twice within the
    same 08:30 minute. Never returns.
    """
    while True:
        if time.strftime('%H:%M') == '08:30':
            run()
        # The original loop spun without sleeping, burning 100% of a CPU
        # core; a 20s pause still cannot miss the one-minute window.
        time.sleep(20)


if __name__ == '__main__':
    # Script entry point: start the daily polling loop (blocks forever).
    job()
