#!/usr/bin/env python
# coding=utf-8
#----------------------------------------------------------------------------
# 文件名：      niu.py
# 说明：       循环抓取牛叉诊股
# 作者：       fern
# 创建时间：    2015-4-21
# 版权：       (c) 2015 by fern
# 许可证：      apache license
#----------------------------------------------------------------------------
import mysqlDB
import hashlib,re,sys
from bs4 import BeautifulSoup
from  datetime  import  *
import gzip,urllib2,random,time
import urllib
import StringIO
import json
import socket
import traceback
socket.setdefaulttimeout(10)
reload(sys)
sys.setdefaultencoding('utf-8')

class Niu:
    def __init__(self):
        self.db =mysqlDB.Mydb(host='localhost', dataBase='stock', user='root', passwd='123456')
        self.db.dbLoad()

    def niu(self):
        #http://doctor.10jqka.com.cn/000415/
        now=datetime.now()
        date=now.strftime('%Y-%m-%d')
        time=now.strftime('%H:%M:%S')
        sql="SELECT * FROM  `stock` WHERE id <> 1244 AND (`niu`<>'%s' or `niu`='0');" %(date)
        query=self.db.query(sql)
        for row in query:
            code=''
            code=row['code'][2:8]
            if code == '300033': #同花顺自己的股 id 1244
                continue
            url="http://doctor.10jqka.com.cn/%s/" % code
            print "ID: %s Code: %s" % (row['id'],row['code'])
            html=self.get_gzip(url)
            if html==False:
                continue
            try:
                html=html.decode("GBK", 'ignore')
            except:
                print html
                continue
            soup = BeautifulSoup(html)
            prion=soup.find(name="div",attrs={"class":"stockvalue"}).contents
            stockvalue=''
            for pf in prion:
                stockvalue+=pf.string
            stocktotal=soup.find(name="div",attrs={"class":"stocktotal"}).string
            value_bar=soup.find(name="div",attrs={"class":"value_bar"}).find(name="span",attrs={"class":"cur"}).string
            value_info=soup.find(name="div",attrs={"class":"value_info"})
            short=value_info.find(name="li",attrs={"class":"short"}).p.string
            mid=value_info.find(name="li",attrs={"class":"mid"}).p.string
            long=value_info.find(name="li",attrs={"class":"long"}).p.string
            cnt=soup.find(name="p",attrs={"class":"cnt"})
            title=soup.find(name="strong",attrs={"class":"title"}).contents[0]
            sql=''
            sql="INSERT INTO `niu`(`code`, `stockvalue`,`title`, `stocktotal`, `value_bar`, `short`, `mid`, `long`, `cnt`, `date`, `time`) VALUES "
            sql+="('%s','%s','%s','%s','%s','%s','%s','%s','%s','%s','%s')" % (row['code'],stockvalue,title,stocktotal,value_bar,short,mid,long,cnt,date,time)
            self.db.save(sql)
            self.update_stock(row['code'],'niu',date)

    def update_stock(self,code,filed,date):
        sql="UPDATE `stock` SET  `%s`='%s' WHERE `code`='%s';" % (filed,date,code)
        self.db.save(sql)

    def get_gzip(self,url,num=0):
        if num==None:
            num=0
        elif num>3:
            return False
        header = {'Accept-Charset':'GBK,utf-8;q=0.7,*;q=0.3','User-Agent' : 'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.151 Safari/534.16'}
        request = urllib2.Request(url,headers=header)
        request.add_header('Accept-encoding', 'gzip')
        opener = urllib2.build_opener()
        try:
            f = opener.open(request,timeout=10)
            isGzip = f.headers.get('Content-Encoding')
            if isGzip :
                compresseddata = f.read()
                compressedstream = StringIO.StringIO(compresseddata)
                gzipper = gzip.GzipFile(fileobj=compressedstream)
                data = gzipper.read()
            else:
                data = f.read()
            return data
        except:
            traceback.print_exc()
            if num<3:
                num=num+1
                self.get_gzip(url,num)

    def get_html(self,url,num=0):
        if num==None:
            num=0
        elif num>3:
            return False
        try:
            if url != None:
                page = urllib.urlopen(url)
                html = page.read()
            else:
                html=""
            return html
        except:
            traceback.print_exc()
            if num<3:
                num=num+1
                self.get_html(url,num)

    def close(self):
        self.db.dbClose()

if __name__ == '__main__':
    print "Prepare data. is come soon ..."
    niu=Niu()
    niu.niu()
    niu.close()
