#!/usr/bin/env python
#-*-coding:utf-8-*-

# http://www.cngold.com.cn/q/syjgzs.html
# http://www.cngold.com.cn/q/Handlers/Market/GetHqData.ashx
# groupcode:NYJGZS | pageSize:60 | pageIndex:1

# from urllib import request
# import urllib
# import http.cookiejar

import urllib2
import urllib
import json
import datetime
import sys
import MySQLdb

# Python 2 hack: site.py removes sys.setdefaultencoding after startup, so
# reload(sys) is needed to get it back. Forcing utf-8 lets implicit
# str/unicode conversions of the Chinese quote names succeed without
# UnicodeDecodeError. (Not needed and not available on Python 3.)
reload(sys)  
sys.setdefaultencoding('utf8') 

def write_log(info):
	"""Append a timestamped entry to the spider.log file in the cwd."""
	entry = "[%s]:\t%s \n\n" % (datetime.datetime.now(), info)
	log = open("spider.log", "a")
	try:
		log.write(entry)
	finally:
		log.close()

def save_to_db(data):
	"""Persist one spider run to MySQL.

	Inserts a parent row into spider_log, then one spider_log_detail row
	per quote item, committing everything as a single transaction.

	data: iterable of dicts with keys "ChangePerc", "PriceIndex", "Type"
	      (Type is a unicode string; stored as utf-8 bytes).
	"""
	# NOTE(review): credentials are hard-coded; move to config/env vars.
	conn = MySQLdb.connect(
		host='localhost',
		port=3306,
		user='root',
		passwd='pactera#12345',
		db='spiderlog',
		charset='utf8'
	)
	try:
		cur = conn.cursor()
		cur.execute("set names 'utf8'")
		# Parameterized query (was '%' string interpolation) -- consistent
		# with the detail insert below and immune to quoting problems.
		cur.execute("insert into spider_log (create_time) values (%s)",
			(datetime.datetime.now(),))
		# lastrowid gives the AUTO_INCREMENT id of the insert above without
		# a second SELECT LAST_INSERT_ID() round trip.
		logid = cur.lastrowid
		# Column name "chageperc" matches the existing schema (sic).
		for item in data:
			cur.execute(
				"insert into spider_log_detail (logid,chageperc,priceindex,type) values (%s,%s,%s,%s)",
				(logid, item["ChangePerc"], item["PriceIndex"], item["Type"].encode("utf-8")))
		cur.close()
		# Commit before close; if anything above raised, we fall through to
		# finally without committing and the transaction is rolled back.
		conn.commit()
	finally:
		# Always release the connection, even on insert failure (the
		# original leaked it when an execute raised).
		conn.close()

def main():

	# proxy = urllib2.ProxyHandler({'http': 'http://ex_chzh:%40WSX3edc@10.68.211.5:8080/'})
	# auth = urllib2.HTTPBasicAuthHandler()
	# opener = urllib2.build_opener(proxy, auth, urllib2.HTTPHandler)
	# urllib2.install_opener(opener)

	postdata = {"groupcode": "NYJGZS", "pageSize": 60, "pageIndex": 1}
	headers = {
		"Accept": "*/*", 
		"Accept-Encoding": "gzip, deflate",
		"Content-Type": "application/x-www-form-urlencoded; charset=UTF-8",
		"User-Agent": "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/53.0.2785.101 Safari/537.36",
		"X-Requested-With": "XMLHttpRequest"
	}

	postdata_bytes = urllib.urlencode(postdata).encode('utf-8') 
	posturl = "http://www.cngold.com.cn/q/Handlers/Market/GetHqData.ashx"
	req = urllib2.Request(posturl, postdata_bytes, headers)  
	
	try:
		rsp = urllib2.urlopen(req)  
		text = rsp.read().decode("utf-8")
		# print "GET Response OK"
		# print text
		# write_log(text)
		data = json.loads(text)
		
		for item in data:
			print item["Type"].encode("utf-8"), type(item["Type"])

		save_to_db(data)
		
		

	except Exception as e:
		print "Ex:%s", e
		write_log(e)



# Script entry point: run the scraper only when executed directly,
# not when imported as a module.
if __name__ == '__main__':
	main()