from scrapy.contrib.spiders import  CrawlSpider, Rule
from scrapy.spider import BaseSpider
from scrapy.contrib.linkextractors.sgml import SgmlLinkExtractor
from scrapy.selector import HtmlXPathSelector
from scrapy.item import Item
from douban.items import QihooItem 
from douban.items import QihooDetailItem
import re
import sys  
reload(sys)  
sys.setdefaultencoding('utf8')   

# Path to the input file: one quoted record per line, with the detail-page
# URL in the 4th quote-delimited field (see start_requests below).
detailURLs = "result/detailurl.txt";
# Output handles are opened at import time and never explicitly closed —
# NOTE(review): relies on interpreter exit to flush/close them.
tempOUT = open("out.tmp",'w');
# NOTE(review): tempOUT2 is opened but never written to anywhere in this
# file — presumably reserved for a second output; verify before removing.
tempOUT2 = open("out2.tmp",'w');
class QihooDSpider(CrawlSpider):
	"""Spider for 360.cn app-detail pages.

	Reads detail-page URLs from ``detailURLs`` and, for each page, appends
	one \\001-separated record of extracted app metadata to ``tempOUT``.
	"""
	name = "Qihoo2"
	allowed_domains = ["360.cn"]

	def start_requests(self):
		"""Yield one request per URL listed in the detail-URL file.

		Each input line contains exactly four double quotes; splitting on
		'"' therefore yields five fields, with the URL in the 4th slot.
		"""
		for line in open(detailURLs):
			tempOUT.write("" + line + "\n")
			# a/b/c/e are discarded; d is the quoted detail-page URL.
			a, b, c, d, e = line.strip().split("\"")
			tempOUT.write("" + d + "\n")
			yield self.make_requests_from_url(d)

	def parse(self, response):
		"""Extract app metadata from a detail page and log it to tempOUT.

		Returns None: this callback schedules nothing further.
		"""
		hxs = HtmlXPathSelector(response)
		iconURL = hxs.select("/html[1]/body[1]/div[4]/div[2]/div[1]/div[1]/div[1]/dl[1]/dt[1]/img[1]")[0].extract()
		gameName = hxs.select("/html[1]/body[1]/div[4]/div[2]/div[1]/div[1]/div[1]/dl[1]/dd[1]/h3[1]")[0].extract()
		t_srcs_str = response.body
		# The page embeds a JS array ("var srcs = [...]") holding download
		# metadata; pull the raw array text out for the field regexes below.
		t_srcs = re.findall(r"var srcs = \[([\s\S]*?)\];", t_srcs_str)[0]
		t_imgs = re.findall(r"<img src=\"(.*?)jpg\"", t_srcs_str)
		# Screenshot URL prefixes, joined with \002 as an inner separator.
		gameImages = ""
		for i in t_imgs:
			gameImages += i
			gameImages += "\002"
		whichAppStore = re.findall(r"'source':'(.*?)'", t_srcs)[0]
		sizeMB = re.findall(r"'size':'(.*?)'", t_srcs)[0]
		downloadURL = re.findall(r"'downurl':'(.*?)'", t_srcs)[0]
		marketIcon = re.findall(r"'mkicon':'(.*?)'", t_srcs)[0]
		url = 'http://intf.baike.360.cn/index.php'
		commentStar = hxs.select("/html[1]/body[1]/div[4]/div[2]/div[1]/div[1]/div[1]/dl[1]/dd[2]/p[1]/span[1]")[0].extract()
		commentScore = hxs.select("/html[1]/body[1]/div[4]/div[2]/div[1]/div[1]/div[1]/dl[1]/dd[2]/em[1]")[0].extract()
		downloadTimes = hxs.select("/html[1]/body[1]/div[4]/div[2]/div[1]/div[1]/div[1]/dl[1]/dd[2]/p[2]")[0].extract()
		updateTime = hxs.select("/html[1]/body[1]/div[4]/div[2]/div[1]/div[1]/div[1]/dl[1]/dd[2]/p[3]")[0].extract()
		hasAdOrNot = hxs.select("/html[1]/body[1]/div[4]/div[2]/div[1]/div[1]/div[1]/ul[1]/li[3]")[0].extract()
		freeOrMoney = hxs.select("/html[1]/body[1]/div[4]/div[2]/div[1]/div[1]/div[1]/ul[1]/li[2]")[0].extract()
		gameVersion = hxs.select("/html[1]/body[1]/div[4]/div[2]/div[1]/div[1]/div[4]/div[1]/div[1]/table[1]/tbody[1]/tr[1]/td[2]")[0].extract()
		gameOS = hxs.select("/html[1]/body[1]/div[4]/div[2]/div[1]/div[1]/div[4]/div[1]/div[1]/table[1]/tbody[1]/tr[1]/td[3]")[0].extract()
		language = hxs.select("/html[1]/body[1]/div[4]/div[2]/div[1]/div[1]/div[4]/div[1]/div[1]/table[1]/tbody[1]/tr[2]/td[1]")[0].extract()
		qihooClassify = hxs.select("/html[1]/body[1]/div[4]/div[2]/div[1]/div[1]/div[4]/div[1]/div[1]/table[1]/tbody[1]/tr[2]/td[2]")[0].extract()
		gameCompany = hxs.select("/html[1]/body[1]/div[4]/div[2]/div[1]/div[1]/div[4]/div[1]/div[1]/table[1]/tbody[1]/tr[2]/td[3]")[0].extract()
		qihooDescription = hxs.select("/html[1]/body[1]/div[4]/div[2]/div[1]/div[1]/div[4]/div[1]/div[1]/div[1]/div[1]")[0].extract()
		commentCount = hxs.select("/html[1]/body[1]/div[4]/div[2]/div[1]/div[1]/div[3]/ul[1]/li[2]/em[1]")[0].extract()
		# BUG FIX: the original referenced `commentItem` without ever
		# assigning it, so every call raised NameError at the write below.
		# An empty placeholder keeps the \001-separated column count stable.
		# TODO(review): fill in the real comment extraction for this column.
		commentItem = ""
		tempOUT.write("!!!" + iconURL + "\001" + gameName + "\001" + whichAppStore + "\001" + marketIcon + "\001" + commentStar + "\001" + commentScore + "\001" + downloadTimes + "\001" + updateTime + "\001" + hasAdOrNot + "\001" + freeOrMoney + "\001" + sizeMB + "\001" + gameVersion + "\001" + gameOS + "\001" + language + "\001" + qihooClassify + "\001" + gameCompany + "\001" + qihooDescription + "\001" + gameImages + "\001" + downloadURL + "\001" + commentItem + "\001" + commentCount + "\001" + url)
		#payload = {'c': 'message', 'a':'getmessage', '':'0', 'store_type':'0'}
		#return FormRequest(url, formdata=payload, callback=self.parse_stores)
		# BUG FIX: the original did `return 0`; a Scrapy callback must return
		# None or an iterable of Requests/Items, so returning 0 raised inside
		# the framework.  Fall through and return None instead.

	def parse_stores(self, response):
		"""Parse a JSON store listing (only reachable via the commented-out
		FormRequest in parse above)."""
		# BUG FIX: json was never imported at module level; import locally.
		import json
		data = json.loads(response.body)
		for store in data['stores']['listing']:
			# TODO(review): McDonaldsItem is undefined here — it looks like a
			# copy-paste leftover from a tutorial; replace with the correct
			# item class (e.g. one of the imported Qihoo items).
			yield McDonaldsItem(name=store['name'], address=store['address'])
