import asyncio
import aiohttp
from urllib.parse import parse_qsl, urlsplit
from faker import Faker
import pandas as pd
import aiomysql
import snowflake.client
import re
import json
from aiomultiprocess import Pool
import aiofiles
import time
from datetime import datetime
import logging

log = logging.getLogger(__name__)
# Connect to the local pysnowflake id server and initialise the client;
# get_snow_id() below draws globally-unique ids from it.
host = 'localhost'
port = 8910
snowflake.client.setup(host, port)


def random_ua():
    """Return a freshly generated random User-Agent string."""
    return Faker().user_agent()


def split_url(url):
    """
    Split a share link into its identifying key.

    Returns ``(surl, True)`` when the URL carries a query string
    (password-protected share), otherwise ``(path, False)``.
    """
    parts = urlsplit(str(url))
    if not parts.query:
        # No query string -> password-less share; the path itself is the key.
        return parts.path, False
    # Password-protected share: the key lives in the 'surl' query parameter.
    params = dict(parse_qsl(parts.query))
    return params.get('surl'), True


# ES index fields: snow_id, filename, content

# Unavailable resources go into a separate table.
# Baidu netdisk exposes four timestamps: `local` is the creation time of the
# local file, `server_time` is when it was uploaded to the server.

async def register():
    '''
    Initialise and return an aiomysql connection pool for the `pan` database.

    :return: the pool on success, ``False`` when the connection fails.
    '''
    try:
        print("start to connect db!")
        pool = await aiomysql.create_pool(host='127.0.0.1', port=3306,
                                          user='root', password='416798Gao!',
                                          db='pan', charset='utf8mb4')
        print("succeed to connect db!")
        return pool
    except asyncio.CancelledError:
        # Re-raise the in-flight cancellation; the old
        # `raise asyncio.CancelledError` constructed a fresh, context-less
        # instance instead of propagating the current one.
        raise
    except Exception as ex:
        # ex.args can be empty for some driver errors, so formatting
        # ex.args[0] could itself raise IndexError; format the exception.
        print("mysql数据库连接失败：{}".format(ex))
        return False


def t2d(timeStamp):
    timeArray = time.localtime(timeStamp)
    otherStyleTime = time.strftime("%Y-%m-%d %H:%M:%S", timeArray)
    return otherStyleTime


def d2t(t):
    """Parse a local-time 'YYYY-MM-DD HH:MM:SS' string into a Unix timestamp."""
    parsed = time.strptime(t, "%Y-%m-%d %H:%M:%S")
    # mktime interprets the struct_time in the local timezone.
    return int(time.mktime(parsed))


async def batchInsert(items):
    """
    Resolve detail/url metadata for each row of *items* and bulk-insert the
    results into the `online` table.

     `ctime`  creation time
     `mtime`  modification time
     `stime`  share time (rows are ordered by share time)
     `utime`  crawl time
    :param items: pandas DataFrame chunk with at least the columns
                  category/client/ext/filename/id/isdir
    :return: None
    """

    sql = "insert into online(`snow_id`, `source`, `source_id`, `filename`, `url`, `pwd`, `size`,`share_id`,`share_uk`, `type`,`ext`, `valid`, `ctime`,`mtime`,`stime`,`utime`) values (%s, %s, %s, %s, %s,%s, %s, %s, %s,%s, %s, %s, %s,%s, %s, %s)"

    values = []
    rows = items[['category', 'client', 'ext', 'filename', 'id', 'isdir']].iterrows()
    for idx, item in rows:
        try:
            sid = item['id']
            category = item['category']
            snow_id = get_snow_id()  # document id shared with the ES index
            source = item['client']
            detail = await get_detail(sid, 'detail')
            name = detail['filename']  # ES field
            pwd = detail['pwd']
            ctime = detail['ctime']
            mtime = detail['utime']
            isdir = detail['isdir']
            size = str(detail['size'])

            get_url = await get_detail(sid, 'url')
            url = get_url['data']
            # File type: 1 video, 2 audio, 3 image, 4 document,
            # 5 application, 6 other, 7 torrent, 8 directory
            if isdir == 1:
                ext = 'dir'
                file_type = 8  # renamed from `type`, which shadowed the builtin
            else:
                ext = item['ext']
                file_type = category
        except Exception:
            # Narrowed from a bare `except:` so task cancellation
            # (asyncio.CancelledError) and KeyboardInterrupt propagate.
            # Record the failing source id and move on to the next row.
            sid = item['id']
            with open('errors.txt', 'a+') as f:
                f.write(str(sid) + '\n')
            continue

        # `valid` is reported dynamically on user clicks;
        # `share_id`, `share_uk` and `stime` are crawled separately.
        print(name, sid)
        values.append((
            snow_id, source, sid, name, url, pwd, size, 0, '', file_type, ext,
            '1', ctime, mtime, None, datetime.now()))

    # A single plain connection is cheaper than a pool for one batch insert.
    # charset aligned with register() (`utf8mb4`) so 4-byte characters in
    # filenames are not truncated by MySQL's 3-byte `utf8`.
    conn = await aiomysql.connect(host="127.0.0.1", port=3306,
                                  user='root', password='416798Gao!',
                                  db='pan', charset='utf8mb4')
    cur = await conn.cursor()
    try:
        await cur.executemany(sql, values)
        await conn.commit()
    except Exception as e:
        # e.g. "Data truncated for column 'pwd'"; log and keep the worker alive.
        log.error(e)
    finally:
        # Always release the cursor/connection, otherwise shutdown hangs.
        await cur.close()
        conn.close()


async def get_detail(sid, source='detail'):
    """
    Fetch share metadata from the dalipan/xiaomapan APIs.

    :param sid: source id of the share
    :param source: 'detail' -> file metadata, 'url' -> download/share link,
                   anything else -> Baidu URL-validity check
    :return: the decoded JSON response (a dict)
    """
    headers = {
        "X-Authorization": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1bmlvbmlkIjoib2JoZWFzNGVyY05JN01reTRyNFFuOVlDNVlnRSIsImlhdCI6MTYyMTc2MDEwNCwiZXhwIjoxNjIxNzcwOTA0fQ.731BfvrJdz9IatYe8x-grVwhLebDWiDo1XpP7tetbx8"
    }
    if source == 'detail':
        url = 'https://api.dalipan.com/api/v1/pan/detail?id={}'.format(sid)
        async with aiohttp.ClientSession() as session:
            async with session.get(url) as rep:
                return await rep.json()
    elif source == 'url':
        url = 'https://api.xiaomapan.com/api/v1/pan/url?id={}'.format(sid)
        async with aiohttp.ClientSession(headers=headers) as session:
            async with session.get(url) as rep:
                return await rep.json()
    else:
        # BUG FIX: this branch built the URL but never issued the request,
        # so callers always got None. Perform the GET like the other
        # xiaomapan branch (assumes the same auth header applies — TODO confirm).
        url = f'https://api.xiaomapan.com/api/v1/pan/checkUrlValidFromBaidu?data={sid}'
        async with aiohttp.ClientSession(headers=headers) as session:
            async with session.get(url) as rep:
                return await rep.json()


async def close(pool):
    """Shut down the aiomysql connection pool and wait until it is fully closed.

    :param pool: pool previously created by register()
    """
    pool.close()  # stop handing out new connections
    await pool.wait_closed()  # block until in-flight connections are released
    print("close pool!")


def get_snow_id():
    """Fetch a new globally-unique id from the local pysnowflake server."""
    guid = snowflake.client.get_guid()
    return guid


async def req():
    """
    Stream o2.csv in 100-row chunks and fan the chunks out to worker
    processes, each running batchInsert on its own chunk.
    """
    chunks = pd.read_csv('o2.csv', encoding='utf-8', iterator=True,
                         low_memory=False, chunksize=100)

    async with Pool() as workers:
        await workers.map(batchInsert, chunks)


if __name__ == '__main__':
    # Maintenance note: `truncate table pan;` resets the autoincrement to 1.
    loop = asyncio.get_event_loop()

    # run_until_complete wraps a bare coroutine in a task itself, so the
    # previous ensure_future() indirection was redundant.
    loop.run_until_complete(req())
