import requests
import json
import re
from pyquery import PyQuery as qp
import random
import demjson
import redis
import pymysql
import time
import  multiprocessing
import datetime

# --- Time-window setup (module level, runs at import) ---

# Day of week for today: 0 = Monday ... 6 = Sunday.
today = datetime.date.today().weekday()

now = time.time()
# Unix timestamp of 00:00 (local time) of the current day:
# (now % 86400) is the offset into the current UTC day; adding
# time.timezone shifts the boundary to local midnight.
midnight = now - (now % 86400) + time.timezone
# if today == 0:
#     prenight = midnight - 86400*3  # times 3 reaches back to Friday (skips the weekend)
# else:
prenight = midnight


# Lower bound of the publication-time window used by get_date():
# only articles published at or after local midnight are stored.
end_time = prenight
timeArray = time.localtime(end_time)
# Human-readable form of the window start, printed for logging.
otherStyleTime = time.strftime("%Y-%m-%d %H:%M:%S", timeArray)
print (otherStyleTime)
#
# now = midnight  # midnight of the current day
# print(now)


# MySQL connection used by get_date() to insert scraped posts.
# NOTE(review): credentials are hard-coded — consider moving them to
# environment variables / a config file.
dbhost = '120.26.211.213'
dbuser = 'huanghai'
passwd = 'huanghai_password'
dbname = 'news_caiji'
conn = pymysql.connect(dbhost, dbuser, passwd, dbname, charset='utf8')
cur = conn.cursor()

def get_date(query, _retries=3):
    """Scrape recent articles of the WeChat public account *query* and
    insert those published since local midnight into the MySQL ``post``
    table.

    Parameters
    ----------
    query : str
        WeChat account alias to search for (e.g. ``'js-fund'``).
    _retries : int, optional
        Bounded number of retries with a fresh random cookie when the
        WeChat API returns a bad response.  Backward-compatible
        addition: existing one-argument callers are unaffected.  (The
        original retried via unbounded recursion and then fell through
        with ``app_msg`` undefined.)

    Side effects: reads cookies from redis, performs HTTP requests,
    commits rows via the module-level ``conn``/``cur``.
    """
    print(otherStyleTime)
    header = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/62.0.3202.75 Safari/537.36'}
    # Logged-in cookies are kept in redis (db 1); pick one at random so
    # requests are spread over several sessions.
    pool = redis.ConnectionPool(host='47.106.139.236', port=6379, db=1, password='zz121212')
    r = redis.Redis(connection_pool=pool)
    key = r.randomkey()
    cookie = demjson.decode(r.get(key).decode())
    print(cookie)
    url = 'https://mp.weixin.qq.com/'

    try:
        # The landing page redirects to a URL carrying the session token.
        resp = requests.get(url, headers=header, cookies=cookie).url
        token = re.findall(r'token=(\d+)', str(resp))[0]
        data = {'action': 'search_biz',
                'token': token,
                'lang': 'zh_CN',
                'f': 'json',
                'ajax': '1',
                'random': random.random(),
                'query': query,
                'begin': '0',
                'count': '5'}

        search_url = 'https://mp.weixin.qq.com/cgi-bin/searchbiz?'
        time.sleep(1)  # throttle to avoid anti-scraping limits
        search_resp = requests.get(search_url, headers=header, cookies=cookie, params=data)
        # Parse the JSON once; the first hit is taken as the account.
        account = search_resp.json().get('list')[0]
        id_data = account.get('fakeid')
        post_data = {'token': token,
                     'lang': 'zh_CN',
                     'f': 'json',
                     'ajax': '1',
                     'random': random.random(),
                     'action': 'list_ex',
                     'begin': '0',
                     'count': '5',
                     'query': '',
                     'fakeid': id_data,
                     'type': '9'}
        content_url = 'https://mp.weixin.qq.com/cgi-bin/appmsg?'
        time.sleep(1)
        contents = requests.get(content_url, headers=header, cookies=cookie, params=post_data).text
        all_info = json.loads(contents)
        app_msg = all_info.get('app_msg_list')
    except Exception:
        # Bad cookie / rate-limited response: retry with a fresh random
        # cookie a bounded number of times, then give up.  Returning
        # here prevents the original bug of continuing with an
        # undefined ``app_msg``.
        print(query, "返回结果出错")
        if _retries > 0:
            get_date(query, _retries - 1)
        return

    # Strips characters outside the BMP (e.g. emoji) that 'utf8' MySQL
    # columns cannot store; compiled once instead of per article.
    non_bmp = re.compile(u'[\U00010000-\U0010ffff]')
    try:
        for items in app_msg:
            title = items.get('title')
            link = items.get('link')
            # Drop the last query parameter to get a stable article URL.
            contents_url = '&'.join(link.split('&')[:-1])
            times = items.get('update_time')        # publication timestamp
            content_time = items.get('update_time')
            Now_time = int(time.time())
            # Only keep articles published between local midnight and now.
            if end_time <= content_time <= Now_time:
                # 'idx' marks the article's position in the push (1 = headline).
                top = re.findall(re.compile(r'idx=(\d+)&'), str(contents_url))[0]
                html = requests.get(link, cookies=cookie).text
                doc = qp(html)
                # Non-empty when WeChat marks the article as original content.
                original = doc('div .rich_media_meta_list #copyright_logo').text()
                contents = doc('div.rich_media_content').text()
                content = non_bmp.sub(u'', contents)
                author = doc('div .rich_media_meta_list p.rich_media_meta_primary')
                if author:
                    author = re.findall(re.compile(r'作者 (.*)'), author.text())[0]
                else:
                    author = ''
                # Map known aliases to their display names before storing.
                if query == 'fuguo1999':
                    query = '富国基金'
                elif query == "js-fund":
                    query = "嘉实基金"
                if top != '1':
                    top = 0
                sql = """insert into post(biz,title,content,content_url,datetime,is_top,media_orig,is_orig,bizname)values(%s,%s,%s,%s,%s,%s,%s,%s,%s)"""
                cur.execute(sql, (id_data, title, content, contents_url, times, top, author, original, query))
                conn.commit()
                print(title, times, original, author, query, contents_url, top)
    except Exception as e:
        print(e)
          


def name_list():
    """Return the WeChat account names flagged for collection.

    Queries ``weixin.weixin2`` for rows with ``choose=1`` and returns a
    list of 1-tuples, e.g. ``[('js-fund',), ...]``.

    The connection is now closed on exit (the original leaked both the
    cursor and the connection).
    """
    dbhost = '120.26.211.213'
    dbuser = 'huanghai'
    passwd = 'huanghai_password'
    dbname = 'weixin'
    conn = pymysql.connect(dbhost, dbuser, passwd, dbname, charset='utf8')
    try:
        # pymysql cursors support the context-manager protocol.
        with conn.cursor() as cur:
            cur.execute('''SELECT NAME FROM weixin2 WHERE choose=1''')
            return list(cur.fetchall())
    finally:
        conn.close()


if __name__ == '__main__':
    # Fetch the configured account list.  Bound to `accounts` (not
    # `name_list`) so the function is not shadowed by its own result —
    # the original did `name_list = name_list()`, which would crash on
    # any later call.
    accounts = name_list()
    #
    # pool = multiprocessing.Pool(processes=2)
    # for query in accounts:
    #     pool.apply_async(get_date, (query[0],))
    # pool.close()
    # pool.join()
    # for user in accounts[150:]:
    #     time.sleep(0.5)
    #     get_date(user[0])
    get_date('js-fund')


