#coding:utf-8
from bs4 import BeautifulSoup
import os
import pymysql as mysql
import pypyodbc as pyodbc
import string
import requests
import  math
import time
import configparser

# Read the download root directory from config.ini ([fileroot] root = ...).
config = configparser.ConfigParser()
with open("config.ini", "r") as cfgfile:
	# readfp() was deprecated in Python 3.2 and removed in 3.12;
	# read_file() is the supported replacement with identical behavior.
	config.read_file(cfgfile)

	# Base directory under which all downloaded images are stored.
	ROOT = config.get("fileroot", "root")
	

# HTTP proxy used for every image request.
proxies = {"http": "192.168.0.71:8012"}

# Impersonate a desktop Chrome browser so the image host serves us normally.
hdrs = {
	'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/49.0.2623.108 Safari/537.36',
}

# Local reference copies of the server's "broken image" placeholder files;
# a downloaded body that byte-matches either one is treated as a failure.
er_photo = r"E:\down_data\博图\error\er.jpg"
er_photo2 = r"E:\down_data\博图\error\er2.jpg"


def read_sql():
	"""Fetch a batch of covers that still need downloading.

	Selects rows from `cover` with stat = 0 (not downloaded) and
	failcount = 0 (never failed), at a fixed offset window of
	LIMIT 10000, 20000.

	Returns:
		tuple of (rawid, url) row tuples.
	"""
	conn = mysql.connect( host = '192.168.0.91', port = 3306, user = 'root', passwd = 'vipdatacenter', db = 'botu',charset='utf8mb4',)
	try:
		cur = conn.cursor()
		cur.execute('''select rawid,url from cover where stat = 0 and failcount = 0 limit 10000,20000 ''' )
		sql_data = cur.fetchall()
		conn.commit()
	finally:
		# Close even if the query raises, so failed batches don't leak connections.
		conn.close()
	return sql_data
def write_sql(sql):
	"""Execute a single UPDATE statement against the `botu` database and commit.

	Args:
		sql: a complete SQL statement string. NOTE(review): callers build this
			with %-interpolation of rawid/url — vulnerable to SQL injection if
			those values ever contain quotes; consider parameterized queries.
	"""
	conn = mysql.connect( host = '192.168.0.91', port = 3306, user = 'root', passwd = 'vipdatacenter', db = 'botu',charset='utf8mb4',)
	try:
		cur = conn.cursor()
		cur.execute(sql)
		conn.commit()
	finally:
		# Close even if execute() raises, so we never leak a connection.
		conn.close()
	
def down_cover(rawid,url,er_data,er_data2):
	"""Download one cover image and record the outcome in the `cover` table.

	Args:
		rawid: numeric primary key of the cover row; also used as the filename.
		url: path fragment appended to the image host's base URL.
		er_data: raw bytes of the first known error-placeholder image.
		er_data2: raw bytes of the second known error-placeholder image.

	Side effects: writes the image under ROOT/<yyyymmdd>/cover/<rawid//1000>/,
	and updates stat/failcount via write_sql().
	"""
	# Shard files into subfolders of ~1000 ids so no directory grows unbounded.
	pathroot = os.path.join(ROOT,  time.strftime('%Y%m%d',time.localtime(time.time())),"cover",str(math.ceil(int(rawid)/1000)))
	# exist_ok avoids the check-then-create race when several workers run at once.
	os.makedirs(pathroot, exist_ok=True)
	file_path = os.path.join(pathroot,str(rawid)+'.jpg')
	if os.path.exists(file_path):
		print("图片存在")
		return
	url_phono = "http://222.198.130.68/"+url
	try:
		r = requests.get(url_phono, proxies=proxies,headers = hdrs,timeout = 5)
	# Was a bare `except:`, which also swallowed KeyboardInterrupt/SystemExit;
	# only network/HTTP errors should count as a download failure.
	except requests.RequestException:
		print("timeout")
		sql = "update cover set failcount = failcount+1 where rawid = '%s' " % (rawid)
		write_sql(sql)
		return
	# A body identical to a known placeholder means the server has no real
	# image; failcount = 3 permanently excludes the row from read_sql().
	if er_data ==r.content:
		print(str(rawid)+": 图片错误")
		sql = "update cover set failcount = 3 where url = '%s' " % (url)
		write_sql(sql)
		return
	if er_data2 ==r.content:
		print(str(rawid)+": 图片错误2")
		sql = "update cover set failcount = 3 where url = '%s' " % (url)
		write_sql(sql)
		return
	with open(file_path,'wb') as f:
		f.write(r.content)
	filesize = os.path.getsize(file_path)
	if filesize <= 10:
		# NOTE(review): a <=10-byte file is discarded but neither stat nor
		# failcount changes, so the row is re-fetched forever — confirm intent.
		os.remove(file_path)
		return
	sql = "update cover set stat = 1 where rawid = '%s' " % (rawid)
	write_sql(sql)
	print(str(rawid)+': 下载成功')
if __name__=="__main__":
	# Load the two reference "error placeholder" images once, up front;
	# down_cover() compares every response body against them.
	with open(er_photo, "rb") as fh:
		er_data = fh.read()
	with open(er_photo2, "rb") as fh:
		er_data2 = fh.read()
	# Keep pulling batches until the pending query comes back empty.
	while True:
		rows = read_sql()
		print("有"+str(len(rows))+"张图片需要下载")
		if not rows:
			break
		for rawid, url in rows:
			down_cover(rawid, url, er_data, er_data2)