# -*- coding: utf-8 -*-
'''
Author: Cooper
A library of general-purpose utilities for regular usage: HTTP helpers,
MySQL access, file/directory tools, and a few site-specific scrapers.
'''

import re
import os
import sys
import time
import shutil
import string
import StringIO
import requests

import json
import ConfigParser

import urllib2
import MySQLdb
from BeautifulSoup import BeautifulSoup

import xlrd
import xlwt

# Per-platform MySQL connection settings, keyed by OS flavour and read by
# getConn() below.  NOTE(review): credentials (including a plaintext
# password in the linux profile) are hard-coded here -- consider moving
# them to a config file kept out of version control.
config={
    'mysqld_win':{
    'host':'localhost',
    'user':'root',
    'pw':'',
    'charset':'utf8'
    },
    'mysqld_linux':{
    'host':'localhost',
    'user':'root',
    'pw':'LTS98',
    'charset':'utf8'
    }
}

# Filesystem encoding of the current platform (e.g. 'mbcs' on Windows).
sys_encoding=sys.getfilesystemencoding()
#platform=sys.platform
# 'posix' or 'nt' -- used by getConn() to pick the credential profile.
os_name=os.name

def getConn(db='test'):
    '''Open a MySQL connection to database *db* using the per-OS
    credentials from the module-level ``config`` dict.

    Returns a MySQLdb connection object, or None when the platform is
    unrecognised or the connection attempt fails (the original
    best-effort, no-raise contract is kept).
    '''
    # Pick the credential profile by platform ('posix' -> linux, 'nt' -> windows).
    if os_name == 'posix':
        cfg = config['mysqld_linux']
    elif os_name == 'nt':
        cfg = config['mysqld_win']
    else:
        return None
    try:
        # Single connect call instead of the duplicated per-branch ones;
        # the redundant function-local `import MySQLdb` is dropped (the
        # module is already imported at file level).
        return MySQLdb.connect(host=cfg['host'],
                               user=cfg['user'],
                               passwd=cfg['pw'],
                               db=db,
                               charset=cfg['charset'])
    except Exception:
        # Best-effort: any connection failure yields None, as before.
        return None

def writeHttpLog(url, t1, t2):
    '''Append one request record (url, start time, end time) to the
    ``httpresponse.httpresponse`` table.

    Uses a parameterized query so quotes or backslashes in *url* cannot
    break the statement (the original interpolated values straight into
    the SQL string, which was injection-prone).
    '''
    db = MySQLdb.connect(host="localhost",
                         user="root", passwd="",
                         db="httpresponse", charset="utf8")
    try:
        cur = db.cursor()
        # Placeholders let the driver do all quoting/escaping.
        cur.execute("INSERT INTO httpresponse VALUES(%s,%s,%s)",
                    (url, t1, t2))
        db.commit()
    finally:
        # Release the connection even if the INSERT fails.
        db.close()

def writeHttpStream(dp, url, s):
    '''Persist one fetched HTTP body *s* under directory *dp*.

    A random numeric id names the payload file, and the mapping
    ``url<TAB>id`` is appended to ``dp/dict.txt`` so the payload can be
    located again later.

    Fix: ``random`` was never imported anywhere in this file, so the
    original raised NameError on every call; it is imported here.
    '''
    import random
    file_id = random.randint(1, 10 ** 10)
    # Record the url -> id mapping first, then write the payload itself.
    with open(dp + '/dict.txt', 'a') as f:
        f.write(url + '\t' + str(file_id) + '\n')
    with open(dp + '/' + str(file_id), 'w') as f:
        f.write(s)

def httpRequest(url='', interval=3):
    '''GET *url* and return the raw response body.

    Retries forever until a request succeeds.  *interval* is the pause
    in seconds between failed attempts -- the parameter was previously
    accepted but never used, producing a busy retry loop.  The large
    block of dead, commented-out urllib2 code has been removed.
    '''
    content = None
    while True:
        try:
            resp = requests.request('GET', url, timeout=10)
            content = resp.content
            break
        except Exception:
            # Best-effort: wait, then retry indefinitely.
            time.sleep(interval)
    return content

def __getSocketFp(url):
    '''GET *url*, retrying every 5 seconds until it succeeds, and
    return the raw response body.

    Cleanup: dead commented-out urllib2 code and the unused ``s``
    variable are removed; returning from the success path makes the
    retry loop's shape explicit.
    '''
    while True:
        try:
            resp = requests.request('GET', url, timeout=10)
            return resp.content
        except Exception:
            print('error to read socket object')
            time.sleep(5)

def __tranSQLChar(s):
    """Escape single quotes for inline SQL: each ``'`` becomes ``\\'``.

    (Double-quote escaping was present in the original source only as a
    commented-out line and is intentionally not performed.)
    """
    return s.replace("'", "\\'")

def parseHTML(s):
    '''Strip HTML tags from *s* via BeautifulSoup and return the
    remaining plain text.'''
    pretty = BeautifulSoup(s).prettify()
    # prettify() places each tag on its own line; remove every tag line.
    for tag_line in re.findall(r'\<.+\>', pretty):
        pretty = pretty.replace(tag_line + '\n', '')
    return pretty

def openMiniWIFI():
    '''Start the Windows hosted network ("MiniWIFI") via netsh and
    report success or failure on stdout.'''
    rc = os.system('netsh wlan start hostednetwork')
    if rc == 0:
        print('Success to start MiniWIFI')
    else:
        print('Fail to start MiniWIFI')
    return

def mkDirX(path):
    '''Create *path* and any missing parent directories (like
    ``mkdir -p``).  Backslashes are normalised to forward slashes.

    Fix: the original hand-rolled recursion raised an uncaught
    ValueError (from rindex) for a single-component path whose creation
    failed; ``os.makedirs`` covers all cases.
    '''
    if os.path.exists(path):
        return
    path = path.replace('\\', r'/')
    try:
        os.makedirs(path)
    except OSError:
        # Lost a race with another creator or the path is unwritable;
        # keep the original best-effort, no-raise contract.
        pass

def copyX(src, dst, override, ignore=[]):
    '''Recursively copy everything under *src* into *dst* (like Windows
    xcopy).  Returns a log list of 'added:...' / 'updated:...' entries.

    override -- when True, replace destination files that already
                exist; when False, leave them untouched (the original
                overwrote them anyway, contradicting the flag).
    ignore   -- list of regex suffixes; matching entries are skipped.

    Fixes vs. the original:
      * the recursion into a subdirectory used ``return``, silently
        skipping every directory entry after the first subfolder;
      * files were overwritten even with override=False;
      * the ignore regex is compiled once (via the module-level ``re``)
        instead of rebuilt through __import__ per entry.
    '''
    if not os.path.isdir(src):
        sys.exit(0)  # kept from the original: abort when src is not a directory
    if not os.path.exists(dst):
        os.makedirs(dst)
    skip = None
    if ignore:
        skip = re.compile(r'({0})$'.format('\\' + '|\\'.join(ignore)))
    log = list()
    for name in os.listdir(src):
        if skip is not None and skip.search(name):
            continue
        s_path = src + '/' + name
        d_path = dst + '/' + name
        if os.path.isfile(s_path):
            if os.path.exists(d_path):
                if override:
                    os.remove(d_path)
                    shutil.copy(s_path, d_path)
                    log.append('updated:' + d_path)
                # override=False: keep the existing destination file.
            else:
                shutil.copy(s_path, d_path)
                log.append('added:' + d_path)
        elif os.path.isdir(s_path):
            if not os.path.exists(d_path):
                os.makedirs(d_path)
                log.append('added:' + d_path)
            # Accumulate and CONTINUE the loop (original returned here).
            log += copyX(s_path, d_path, override, ignore)
    return log

def syncDir(src, dst, ignore=[]):
    '''One-way sync from *src* to *dst*: copy new files, refresh files
    whose source mtime is strictly newer, and recurse into existing
    subdirectories.  Returns a log list of 'added:...' / 'updated:...'
    entries.

    ignore -- list of regex suffixes; matching entries are skipped.

    Improvement: the ignore regex is compiled once via the module-level
    ``re`` instead of being rebuilt through __import__ per entry.
    '''
    log = list()
    skip = None
    if ignore:
        skip = re.compile(r'({0})$'.format('\\' + '|\\'.join(ignore)))
    for name in os.listdir(src):
        if skip is not None and skip.search(name):
            continue
        s_path = src + '/' + name
        d_path = dst + '/' + name
        if os.path.isfile(s_path):
            if os.path.exists(d_path):
                # Refresh only when the source copy is strictly newer.
                if os.path.getmtime(s_path) > os.path.getmtime(d_path):
                    os.remove(d_path)
                    shutil.copy(s_path, d_path)
                    log.append('updated:' + d_path)
            else:
                shutil.copy(s_path, d_path)
                log.append('added:' + d_path)
        elif os.path.isdir(s_path):
            if os.path.exists(d_path):
                log += syncDir(s_path, d_path, ignore)
            else:
                # New subtree: bulk-copy it; copyX's detailed log is
                # discarded, matching the original's single summary entry.
                copyX(s_path, d_path, False, ignore)
                log.append('added:' + d_path)
    return log

def listFilesX(path):
    '''Recursively collect every file under *path* (subfolders
    included) and return the '/'-joined paths as a list.'''
    found = []
    for entry in os.listdir(path):
        full = path + '/' + entry
        if os.path.isfile(full):
            found.append(full)
        elif os.path.isdir(full):
            found.extend(listFilesX(full))
    return found

def listExFilesX(path, exts):
    '''Recursively collect files under *path* whose names end with one
    of the *exts* regex suffixes.

    Pass extensions WITH a leading dot (e.g. ['.txt', '.py']): the
    pattern prefixes a backslash before the first suffix and before
    each joined one, which escapes that leading dot.

    Improvement: the suffix regex is compiled once per directory via
    the module-level ``re`` instead of being rebuilt through
    __import__('re') for every single file.
    '''
    pattern = re.compile(r'({0})$'.format('\\' + '|\\'.join(exts)))
    matched = list()
    for entry in os.listdir(path):
        full = path + '/' + entry
        if os.path.isfile(full):
            if pattern.search(entry):
                matched.append(full)
        elif os.path.isdir(full):
            matched += listExFilesX(full, exts)
    return matched

def listIgnoreFilesX(path, ignore):
    '''Recursively collect files under *path*, excluding names ending
    with one of the *ignore* regex suffixes (pass suffixes with a
    leading dot, e.g. ['.pyc']).

    Improvements: the suffix regex is compiled once per directory via
    the module-level ``re`` (the original rebuilt it through
    __import__('re') for every file) and the duplicated isfile() check
    is folded into a single branch.
    '''
    skip = None
    if ignore:
        skip = re.compile(r'({0})$'.format('\\' + '|\\'.join(ignore)))
    kept = list()
    for entry in os.listdir(path):
        full = path + '/' + entry
        if os.path.isfile(full):
            if skip is not None and skip.search(entry):
                continue
            kept.append(full)
        elif os.path.isdir(full):
            kept += listIgnoreFilesX(full, ignore)
    return kept

def netConn(url='http://www.baidu.com'):
    '''Block until an HTTP GET against *url* succeeds, printing a
    status line every 5 seconds while the network is down.

    Cleanup: dead commented-out urllib2 code and the unused response
    binding are removed; output strings are unchanged.
    '''
    while True:
        try:
            requests.request('GET', url)
            print('Success to connect')
            break
        except Exception:
            time.sleep(5)
            # Same output as the original comma-print: asctime, one
            # space, then the message.
            print(time.asctime() + ' Not connected...')

def showYYETsTV():
    '''Check the YYETS TV-series pages listed in C:/yyets.ini and
    print/collect per-series download links.

    Returns a list with one dict per site; each dict maps a format tag
    (from the ini's 'TV series format' section) to {episode_number: url}.

    NOTE(review): Python 2 only (print statements, string.atoi) and
    depends on the legacy BeautifulSoup 3 API (first/findAll).
    '''
    print time.strftime('%Y-%m-%d',time.localtime())
    config=ConfigParser.RawConfigParser(allow_no_value=True)
    # Series URLs come from the [TV series] section of the ini file.
    with open('C:/yyets.ini') as f:
        config.readfp(f)
    sites=[(t[1]) for t in config.items('TV series')]
    # Format tags used to bucket the links (comma-separated ini value).
    tag=config.get('TV series format','format').split(',')
    r=list()
    for site in sites:
        print site
        f=urllib2.urlopen(site)
        s=f.read()
        f.close()
        soup=BeautifulSoup(s)
        # First two meta keywords -- presumably the series names; verify.
        t=soup.find('meta',{'name':'keywords'})['content'].split(',')[:2]
        print t[0],t[1]
        print site
        kw='更新日期'
        # Print the yyyy-mm-dd date following the "update date" label.
        print re.findall(kw+r'.+(\d{4}\-\d{2}\-\d{2})',s)[0]
        t=soup.findAll('div',id='dow_bt_2')[0]
        t=t.first('ul').findAll('li')
        # Each <li><a> is one downloadable item: [title, href].
        l=[([tt.a['title'],tt.a['href']]) for tt in t if tt.a!=None]
        d=dict()
        for tagT in tag:
            d[tagT]=dict()
        for li in l:
            for tagT in tag:
                if li[0].find(tagT)!=-1:
                    # Skip items without an SxxEyy marker in the title.
                    if len(re.findall(r'S(\d+)E(\d+)',li[0]))==0:
                        continue
                    # Key by episode number (the Eyy group).
                    i=string.atoi(re.findall(r'S(\d+)E(\d+)',li[0])[0][1])
                    d[tagT][i]=li[1]
        for k in d:
            print k
            t=d[k].keys()
            t.sort()
            print t
        del soup,f,l,t
        r.append(d)
        print ''
    return r

def showOpenYaleCourse():
    '''Scan the [Open Yale course] section of C:/yyets.ini and return a
    dict mapping file extension -> [[title, href], ...] of download
    links.

    NOTE(review): the ``return dExt`` sits INSIDE the for-loop, so only
    the first site is ever processed -- confirm whether that is
    intended.  Python 2 / BeautifulSoup 3 only (print statement,
    has_key, first()).
    '''
    config=ConfigParser.RawConfigParser(allow_no_value=True)
    with open('C:/yyets.ini') as f:
        config.readfp(StringIO.StringIO(f.read()))
    # Each ini value has its first/last chars stripped then is comma-split
    # -- presumably '(url,tag1,...)'-shaped; verify against the ini file.
    sites={t[0]:t[1][1:-1].split(',') for t in config.items('Open Yale course')}
    for site in sites:
        f=urllib2.urlopen(sites[site][0])
        tag=sites[site][1:]
        s=f.read()
        f.close()
        soup=BeautifulSoup(s)
        # First two meta keywords -- presumably the course names; verify.
        t=soup.find('meta',{'name':'keywords'})['content'].split(',')[:2]
        print t[0],t[1]
        t=soup.findAll('div',id='dow_bt_2')[0]
        t=t.first('ul').findAll('li')
        # Each <li><a> is one downloadable item: [title, href].
        l=[([tt.a['title'],tt.a['href']]) for tt in t if tt.a!=None]
        exts=list()
        dExt=dict()
        for li in l:
            # Extract the extension preceding a size figure in the title.
            t=re.findall(r'\.(\w+)\s+\d+\.*\d*\s',li[0])
            if len(t)>0:
                if exts.count(t[0])==0:
                    exts.append(t[0])
                if dExt.has_key(t[0]):
                    dExt[t[0]].append(li)
                else:
                    dExt[t[0]]=[li]
            pass
        return dExt
    pass

def updateZhihuReader():
    '''
    Check and save new ZHIHU hot QA (data from www.zhihu.com/reader).

    Fetches http://www.zhihu.com/reader/json/<i> pages in sequence and
    inserts each entry into the local MySQL table ``zhihu.ZHIHU``.
    Stops at the first duplicate key (MySQL error 1062), i.e. once it
    reaches content already saved by a previous run.

    NOTE(review): SQL is built by string formatting with only quote
    escaping (__tranSQLChar) -- injection-prone on hostile content.
    Python 2 only (print statements, str/unicode encode calls).
    '''
    for i in range(1,10**5):
        s=__getSocketFp('http://www.zhihu.com/reader/json/{0}'.format(str(i)))
        time.sleep(2)
        j=json.loads(s)
        db=MySQLdb.connect(host="localhost",user="root",passwd="",db="zhihu",charset="utf8")
        n=0
        ext=False
        for jt in j:
            # Build the INSERT piecewise.  The jt[...] indices follow the
            # reader JSON layout -- presumably jt[7][1]=title,
            # jt[7][2]=description, jt[7][3]=question id, jt[4]=timestamp
            # (inferred from usage below; TODO confirm against the feed).
            cmd="INSERT INTO ZHIHU VALUES('{0}',".format(jt[0])
            cmd+="'{0}',".format((__tranSQLChar(jt[7][1])).encode('utf-8'))
            cmd+="'{0}',".format((__tranSQLChar(jt[7][2])).encode('utf-8'))
            cmd+="'{0}',".format(','.join([__tranSQLChar(t[1]) for t in jt[7][7]]).encode('utf-8'))
            t=time.localtime(jt[4])
            cmd+="'{0}',".format(time.strftime('%Y-%m-%d %H:%M:%S',t))
            if isinstance(jt[6],list):
                # Answerer nickname when present; anonymous placeholder otherwise.
                try:cmd+="'{0}',".format(__tranSQLChar(jt[6][0])).encode('utf-8')
                except:cmd+="NULL,"
            else:cmd+="'匿名用户',"
            try:cmd+="'{0}',".format((__tranSQLChar(jt[2].encode('utf-8'))))
            except:cmd+='NULL,'
            try:
                cmd+="'http://www.zhihu.com/question/{0}',".format(str(jt[7][3]))
                cmd+="NOW())"
                db.query(cmd)
                db.commit()
                n+=1
            except MySQLdb.IntegrityError as e:
                # 1062 = duplicate key: we caught up with an earlier run.
                if e.args[0]==1062:
                    ext=True
                    break
            except Exception as e:print e
            # Progress line; falls back when the title cannot be printed.
            try:print i,n,jt[7][1],"http://www.zhihu.com/question/{0}".format(str(jt[7][3]))
            except:print i,n,"http://www.zhihu.com/question/{0}".format(str(jt[7][3]))
        db.close()
        if ext:break
    pass

def readNMCWeather():
    '''Print the weather forecast for each city listed in the [weather]
    section of C:/weather.ini, scraped from National Meteorological
    Center of CMA pages.

    NOTE(review): Python 2 / BeautifulSoup 3 only (print statements,
    fromEncoding, str.decode); pages are parsed as GB2312.
    '''
    config=ConfigParser.RawConfigParser(allow_no_value=True)
    with open('C:/weather.ini') as f:
        config.readfp(f)
    # Each ini entry is comma-split -- presumably 'name,url'; verify.
    d={t[0]:t[1].split(',') for t in config.items('weather')}
    # Fixed byte widths (GB2312) used to left-justify selected fields.
    dj={1:8,5:8,2:6,6:6,3:12,7:12}
    for k in d:
        url=d[k][1]
        s=__getSocketFp(url)
        soup=BeautifulSoup(s,fromEncoding='GB2312')
        s=soup.find('div',id='city_body').findAll('div',{'class':'weather_div'})
        print d[k][0].decode('utf-8')
        for st in s:
            # Non-empty <li> texts are the forecast fields of one block.
            t=[(tt.text) for tt in st.findAll('li') if len(tt.text.strip())>0]
            for i in range(8):
                # Pad selected fields to fixed byte widths for alignment;
                # indices 0 and 4 are intentionally left untouched.
                if i>=len(t) or i in (0,4):
                    continue
                t[i]=t[i].encode('GB2312').ljust(dj[i]).decode('GB2312')
            print st.div.text,' '.join(t)
        print ''
    pass

def readExcel(fp):
    '''Read sheet 0 of the Excel workbook at *fp* and return its rows
    as a list of row-value lists.

    Fix: the original ended with ``del wb,sh,i``, which raised
    NameError for a zero-row sheet (the loop variable ``i`` was never
    bound); deleting locals just before return is pointless, so the
    ``del`` is dropped and the append loop becomes a comprehension.
    '''
    wb = xlrd.open_workbook(fp)
    sh = wb.sheet_by_index(0)
    return [sh.row_values(i) for i in range(sh.nrows)]

def exportZhihuData(fp):
    '''Export saved ZHIHU QA rows to an Excel workbook at *fp*; returns
    True on completion.

    NOTE(review): pulls batches from the generator returned by
    readZhihuData(), which is NOT defined in this file -- presumably
    provided elsewhere; each yielded batch appears to be a dict of
    {key: {'title','description','link','answernick','answer'}}
    (inferred from the field access below -- confirm against its
    definition).  Python 2 only (generator ``.next()``).
    '''
    wb=xlwt.Workbook('utf-8')
    ws=wb.add_sheet('zhihu')
    # Header row.
    ws.write(0,0,'title')
    ws.write(0,1,'description')
    ws.write(0,2,'link')
    ws.write(0,3,'answernick')
    ws.write(0,4,'answer')
    ln=1
    g=readZhihuData()
    d=g.next()
    tl=list()
    # Consume batches until the generator yields None or an empty dict.
    while d!=None:
        if len(d)==0:
            break
        for li in d:
            # Assemble one output row; HTML answers flattened to text.
            one=[d[li]['title']]
            one.append(d[li]['description'] or '')
            one.append(d[li]['link'])
            one.append(d[li]['answernick'] or '')
            one.append(parseHTML(d[li]['answer']))
            for i in range(5):
                try:
                    ws.write(ln,i,one[i])
                except:
                    # Best-effort: dump the offending cell and carry on.
                    print one[i]
            ln+=1
        d=g.next()
    g.close()
    wb.save(fp)
    return True

def main():
    '''Entry-point placeholder: the module is meant to be imported as a
    library, so running it directly does nothing.'''
    pass

if __name__ == '__main__':
    # Run only when executed directly, not on import (main() is a no-op).
    main()