# encoding=UTF-8

import urllib
import re
import pandas as pd
import datetime as dt
import time
import requests as req
import os
import urllib2
from sqlalchemy import create_engine


# Create the database connection (SQLAlchemy engine) for the stock schema.
def db():
    """Return a SQLAlchemy engine for the `stock` database, or None on failure.

    NOTE(review): credentials are hard-coded in the URL; move them to
    configuration/environment. create_engine() is lazy, so connection errors
    usually surface on first use rather than here; the try/except is defensive.
    """
    try:
        conn = create_engine('mysql+pymysql://root:txdb818283TXDB@sh-cdb-olur5669.sql.tencentcdb.com:63351/stock?charset=utf8')
        return conn
    except Exception as e:  # fix: "except Exception, e" is Python-2-only syntax
        print('[db]{}'.format(e))
        return None


# 规则
# item/action_from_to
# 存储Target/Source命名
# web - 网络
# history - 数据库[历史记录]
# analyze - 数据库[分析结果]
# db - 数据库[其它]
# file - 文件/模型


# Fetch the stock code list from the web and save it to the database.
def save_code_list_from_web():
    """Scrape the EastMoney stock-list page and insert one
    (market, code, name) row per stock into the `code` table.

    Returns None. Per-row insert errors (e.g. duplicates) are logged and
    skipped so one bad row does not abort the whole import.
    """
    conn = db()
    if conn is None:
        print('[code_list_web_db] can not open database')
        return None

    url = 'http://quote.eastmoney.com/stocklist.html'
    text = urllib.urlopen(url).read().decode('gbk')  # page is GBK-encoded
    # Groups: market prefix (2 chars), href code, display name, numeric code.
    s = '<li><a target="_blank" href="http://quote.eastmoney.com/(\S\S)(.+).html">(.+)\((\d+)\)</a></li>'
    pat = re.compile(s, re.MULTILINE)
    item_list = pat.findall(text)

    # Parameterized insert: scraped text must not be spliced into SQL.
    sql = u'insert into `code` (`market`, `code`, `name`) values (%s, %s, %s)'
    try:
        for item in item_list:
            try:
                conn.execute(sql, (item[0], item[1], item[2]))
            except Exception as e:
                print('[save_code_list]can not insert value: {}'.format(e))
                continue
    finally:
        # BUG FIX: cleanup used to run in the loop body's finally, closing
        # the connection after the first row; also Engine cleanup is
        # dispose(), not close().
        conn.dispose()


# Load the previously saved code list from the database.
def code_list_from_db():
    """Return the list of stock codes starting with 0, 3 or 6 (A-share
    main-board / SME / ChiNext prefixes), or None on any failure.
    """
    conn = db()
    if conn is None:
        print('[code_list_from_db] can not open database')
        return None

    sql = u'select `code` from `code` where `code` like "0%%" or `code` like "3%%" or `code` like "6%%"'

    try:
        t = pd.read_sql(sql, con=conn)
        return list(t['code'])
    except Exception as e:
        print('[code_list_from_db]{}'.format(e))
        return None
    finally:
        # BUG FIX: an Engine has no close(); close() here raised
        # AttributeError from finally and masked the return value.
        conn.dispose()


# Fetch daily history from the web and append it to the database.
def save_history_to_db(code, start='1990-01-01'):
    """Download daily quotes for `code` from 163.com (from `start` up to
    today), clean them, and append them to the per-code table.

    code  -- 6-digit stock code string, e.g. '600000'
    start -- first date to fetch, 'YYYY-MM-DD'
    """
    def read_remote(url, retries=30):
        # BUG FIX: the original retried via unbounded recursion, which would
        # eventually blow the recursion limit. Retry iteratively with a 10s
        # pause and give up (returning None) after `retries` attempts; the
        # caller already handles a None result.
        for _ in range(retries):
            try:
                return pd.read_csv(url, encoding='GBK')  # feed is GBK-encoded
            except Exception:
                print('Mark: Can not fetch from web')
                time.sleep(10)
        return None

    conn = db()
    if conn is None:
        print('[fetch_history_from_web] can not open database')
        return

    # 163.com prefixes Shanghai codes (6xxxxx) with '0', others with '1'.
    prefix = '0' if code.startswith('6') else '1'
    today = dt.datetime.now().strftime('%Y-%m-%d')
    url = 'http://quotes.money.163.com/service/chddata.html?code={c}&&start={s}&end={e}'.format(c=prefix+code, s=start, e=today)
    print(url)
    t = read_remote(url)
    if t is not None and t.shape[0] > 0:
        t.columns = ['Date', 'Code', 'Name', 'Close', 'High', 'Low', 'Open', 'PrevOpen',
                     'Distance', 'DisRate', 'Change', 'Count', 'Amount', 'TotalValue',
                     'MarketValue', 'Number']

        # Drop identification/derived columns and rows without usable prices.
        t.drop(['Code', 'Name', 'Number', 'PrevOpen', 'Distance', 'DisRate'], axis=1, inplace=True)
        t.dropna(axis=0, how='any', inplace=True)
        t = t[~ t['Close'].isin([0, None, 0.0, 'None', 'NaN'])]
        t['Date'] = t['Date'].map(lambda x: dt.datetime.strptime(x, '%Y-%m-%d'))
        t.set_index(keys=['Date'], inplace=True)
        try:
            t.to_sql(code, conn, if_exists='append')
        except Exception as e:
            print('[fetch_history_from_web]can not insert value: {}'.format(e))
        finally:
            # Engine cleanup is dispose(), not close().
            conn.dispose()


# Fetch incremental history from the web and append it to the database.
def append_web_history_to_db(code):
    """Look up the latest stored date for `code`, then fetch history starting
    from the following day. Falls back to 1990-01-01 when the table is empty
    or the lookup fails.
    """
    sql = u'select max(`Date`) from `{code}`'.format(code=code)
    fetch_date = '1990-01-01'
    conn = db()
    if conn is None:
        print('[append_web_history] can not open database')
        return
    try:
        q = conn.execute(sql)
        last_date = q.fetchall()[0][0]
        if last_date is not None:
            # BUG FIX: the driver may return a str OR a date/datetime for a
            # date column; the original assumed str and crashed otherwise.
            if isinstance(last_date, (dt.datetime, dt.date)):
                date_time = dt.datetime(last_date.year, last_date.month, last_date.day)
            else:
                date_time = dt.datetime.strptime(last_date, '%Y-%m-%d')
            print('from date:' + date_time.strftime('%Y-%m-%d'))
            next_date = date_time + dt.timedelta(days=1)
            fetch_date = next_date.strftime('%Y-%m-%d')
    except Exception as e:
        print('[append_web_history_from_web]{}'.format(e))  # e.message is deprecated
    finally:
        # Engine cleanup is dispose(), not close().
        conn.dispose()

    save_history_to_db(code, fetch_date)


# Load the stored history from the database.
def history_from_db(code):
    """Return the full stored history for `code` as a DataFrame, or None when
    the table is missing, the query fails, or there are 100 rows or fewer
    (too little data to analyze).
    """
    # GROUP BY Date de-duplicates rows that were appended more than once.
    sql = u'select * from `{code}` group by `Date` order by `Date`'.format(code=code)
    print(sql)
    conn = db()
    if conn is None:
        # BUG FIX: log tag said [append_web_history] (copy-paste error).
        print('[history_from_db] can not open database')
        return None

    try:
        t = pd.read_sql(sql, con=conn)
        if t.shape[0] > 100:
            return t
        print('[history_from_db]table is too small')
        return None
    except Exception as e:
        print('[history_from_db]can not get row list:{}'.format(e))  # e.message is deprecated
        return None
    finally:
        # Engine cleanup is dispose(), not close().
        conn.dispose()


# Load the most recent `number` history rows from the database.
def last_history_from_db(code, number):
    """Return the latest `number` stored rows for `code` (newest first) as a
    DataFrame, or None on failure.
    """
    # int() both validates `number` and blocks SQL injection via the limit.
    sql = u'select * from `{code}` group by `Date` order by `Date` desc limit {number}'.format(code=code, number=int(number))
    conn = db()
    if conn is None:
        print('[last_10_history_from_db] can not open database')
        return None

    try:
        return pd.read_sql(sql, con=conn)
    except Exception as e:
        print('[last_10_history_from_db]can not get row list:{}'.format(e))  # e.message is deprecated
        return None
    finally:
        # Engine cleanup is dispose(), not close().
        conn.dispose()


# Upload a model file.
def upload(code, file):
    """POST the model file at path `file` to the upload endpoint under the
    name '<code>.mlmodel'. Errors are logged, never raised.
    """
    url = u'http://biz2wiz.com/ai/upload'
    try:
        # BUG FIX: the handle used to leak; `with` closes it after the POST.
        with open(file, 'rb') as fh:
            files = {'file': (code + '.mlmodel', fh)}
            r = req.post(url, files=files)
        print(r)
    except Exception as e:
        print('[upload]{}'.format(e))

# Download a model file if it is not already present locally.
def download(code, file):
    """Download a model to ./data/<code>.mlmodel unless that file exists.

    NOTE(review): the request URL does not include `code`, so every call
    fetches the same resource, and the `file` parameter is unused — confirm
    against the server API whether the code should be sent.
    """
    model_file = u'./data/{code}.mlmodel'.format(code=code)
    if os.path.exists(model_file):
        print('model file already exists:'+code)
        return

    url = u'http://biz2wiz.com/ai/download'
    resp = urllib2.urlopen(url)
    try:
        # Avoid shadowing the `file` parameter (and builtin) with the handle.
        with open(model_file, 'wb') as out:
            out.write(resp.read())
    finally:
        resp.close()  # BUG FIX: the urllib2 response used to stay open
