#!/usr/bin/env python

import json
import urllib2
import urllib
import datetime
import sys
import MySQLdb

def write_log(info):
	"""Append a timestamped entry to the local spider.log file."""
	stamp = datetime.datetime.now()
	log_file = open("spider.log", "a")
	try:
		log_file.write("[%s]:\t%s \n\n" % (stamp, info))
	finally:
		log_file.close()

def get_http_data(param, headers):
	"""POST *param* as a compact JSON payload to the greatgas search API.

	param   -- dict serialized to JSON for the request body
	headers -- dict of HTTP headers passed straight to urllib2.Request
	Returns the decoded JSON response object.
	Raises urllib2.URLError / HTTPError on network failure, ValueError on
	a non-JSON response body.
	"""
	# Compact separators replace the old `data.replace(' ', '')` hack,
	# which also stripped spaces INSIDE string values and could corrupt
	# any condition/field value that legitimately contains a space.
	data = json.dumps(param, separators=(',', ':'))
	post_url = 'https://www.greatgas.cn/prod/rest/goods/mall/searchByConditions'
	req = urllib2.Request(post_url, data.encode('utf-8'), headers)

	rsp = urllib2.urlopen(req)
	try:
		text = rsp.read().decode("utf-8")
	finally:
		# Close the response explicitly so the socket is not leaked.
		rsp.close()
	return json.loads(text)

def save_gas_to_db(data):
	"""Persist one spider run: a spider_log_gas header row plus one
	spider_log_gas_detail row per scraped item.

	data -- list of dicts as returned by the greatgas goodsList API
	Commits on success; the connection is closed even on failure.
	"""
	conn = MySQLdb.connect(
		host='localhost',
		port=3306,
		user='root',
		passwd='pactera#12345',
		db='spiderlog',
		charset='utf8'
	)
	try:
		cur = conn.cursor()
		cur.execute("set names 'utf8'")
		# Parameterized query instead of %-interpolation: avoids SQL
		# injection and lets the driver serialize the datetime correctly.
		cur.execute("insert into spider_log_gas (create_time) values (%s)",
			(datetime.datetime.now(),))

		cur.execute("select LAST_INSERT_ID()")
		logid = cur.fetchone()[0]

		sql = "insert into spider_log_gas_detail(logid, company_name, source_name, restricted_area, delivery_begin_date, delivery_end_date, delivery_code, quantity, guiding_price, pay_method, disburse_type, create_time) values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
		now = datetime.datetime.now()
		rows = []
		for item in data:
			rows.append((
				logid,
				item["companyName"].encode("utf-8"),
				item["gasSourcesName"].encode("utf-8"),
				item["goodsRestrictedArea"].encode("utf-8"),
				item["goodsDeliveryBegdate"].encode("utf-8"),
				item["goodsDeliveryEnddate"].encode("utf-8"),
				item["goodsDeliveryCode"].encode("utf-8"),
				# No need to encode before float(): float() accepts the
				# unicode string directly.
				float(item["goodsQuantity"]),
				float(item["goodsGuidingPrice"]),
				item["payMethod"].encode("utf-8"),
				item["disburseType"].encode("utf-8"),
				now,
			))

		if rows:
			cur.executemany(sql, rows)

		cur.close()
		conn.commit()
	finally:
		# Previously the connection leaked if any statement raised.
		conn.close()

def load_remote_data():
	"""Fetch the complete goods list from the remote API and save it to MySQL.

	Strategy: a probe request with pagesize 1 learns the total item count,
	then a single follow-up request pulls the remaining items (offset 1,
	pagesize = total) in one page.
	"""
	query = {"arguments": [
		{"conditions": "[]"},
		{"from": "0"},
		{"pagesize": "1"},
		{"sortField": "dynProperties.goodsPubTime"},
		{"sortType": "desc"}]}

	headers = {
		"Accept": "*/*",
		"Accept-Encoding": "gzip, deflate",
		"Content-Type": "application/x-www-form-urlencoded; charset=UTF-8",
		"User-Agent": "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/53.0.2785.101 Safari/537.36",
		"X-Requested-With": "XMLHttpRequest"
	}

	goods = []
	total = 0
	# Probe: one item, just to discover totalNum.
	try:
		reply = get_http_data(query, headers)
		total = int(reply["results"][1]["totalNum"])
		goods.extend(reply["results"][0]["goodsList"])
	except Exception as e:
		print "error:", e

	# Remainder: everything after the already-fetched first item.
	if total > 0:
		query["arguments"][1]["from"] = str(1)
		query["arguments"][2]["pagesize"] = str(total)
		try:
			reply = get_http_data(query, headers)
			goods.extend(reply["results"][0]["goodsList"])
		except Exception as e:
			print "error:", e

	print "done" + str(len(goods))
	save_gas_to_db(goods)



# Script entry point: run one full scrape-and-store cycle.
if __name__ == '__main__':
	load_remote_data()
