# -*- coding: utf-8 -*-
'''
stock error close with lower
'''
import scrapy
from bs4 import BeautifulSoup
from finance.items import StockItem
from scrapy.selector import Selector
from scrapy.http import  Request
import re
import os

import json
import os

from finance.items import StockItem
import sqlite3

class StockHistory(scrapy.Spider):
	"""Crawl historical daily stock quotes from Sina Finance.

	Stock codes come from the local ``finance.db`` SQLite database; for each
	code the spider fetches the per-quarter history pages and emits a
	``StockItem`` per quarter with the parsed daily-quote table.
	"""
	name = "stockhistory_"
	# BUG FIX: attribute was misspelled "allowed_demains"; Scrapy silently
	# ignores unknown attributes, so the offsite filter was never active.
	allowed_domains = ["sina.com.cn"]

	def __init__(self, type=None, *args, **kwargs):
		"""Prepare output directories for scraped history data.

		:param type: optional crawl type passed via ``scrapy -a type=...``.
			The name shadows the builtin but is kept for backward
			compatibility with existing command lines.
		"""
		super(StockHistory, self).__init__(*args, **kwargs)
		self.type = type
		# Make sure the result directories exist before any item is written.
		for path in ("./res/his_stock/", "./res/his_futu/"):
			if not os.path.exists(path):
				os.makedirs(path)

	def get_codes(self):
		"""Return every stock code recorded in the local database.

		:returns: list of code strings from the ``stock_info`` table.
		"""
		db = sqlite3.connect("./finance.db")
		try:
			cu = db.cursor()
			cu.execute("select code from stock_info")
			return [row[0] for row in cu.fetchall()]
		finally:
			# Close even if the query raises, so the DB file is not left open.
			db.close()

	def start_requests(self):
		"""Yield one history request per stock code for 2016 Q4.

		Each request carries its stock code in ``meta['code']`` so the
		callback knows which stock the page belongs to.
		"""
		for code_ in self.get_codes():
			head_ = "http://vip.stock.finance.sina.com.cn/corp/go.php/vMS_MarketHistory/stockid/%s.phtml" % (code_)
			link = "%s?year=%d&jidu=%d" % (head_, 2016, 4)
			req = Request(url=link, callback=self.day_parse)
			req.meta["code"] = code_
			yield req

	def parse(self, response):
		"""Discover available years for a stock and request every quarter.

		Reads the ``<select name="year">`` options on the history index page
		and fans out one :meth:`day_parse` request per (year, quarter) pair.
		"""
		code_ = response.meta["code"]
		years = response.xpath('//select[@name="year"]/option/text()').extract()
		# Converted from a Python 2 print statement for Python 3 compatibility.
		print(code_, "------------", years)

		for year in years:
			# Quarters ("jidu") are 1-based: 1..4.
			for season in range(1, 5):
				head_ = "http://vip.stock.finance.sina.com.cn/corp/go.php/vMS_MarketHistory/stockid/%s.phtml" % (code_)
				link = "%s?year=%s&jidu=%d" % (head_, year, season)
				req = Request(url=link, callback=self.day_parse)
				req.meta["code"] = code_
				yield req

	def day_parse(self, response):
		"""Extract the daily-quote table from one quarter's history page.

		:returns: a ``StockItem`` whose ``data`` field is a list of 7-column
			rows (presumably date/open/high/low/close/volume/amount — TODO
			confirm column order against the live page), or ``None`` when
			the page has no ``FundHoldSharesTable``.
		"""
		code_ = response.meta["code"]
		soup = BeautifulSoup(response.body, 'lxml')
		tagHold = soup.find('table', {'id': 'FundHoldSharesTable'})
		if tagHold is None:
			return None

		item = StockItem()
		item['item_type'] = 'his_stock'
		item['item_code'] = code_

		# Strip every tag NOT in the small whitelist (li/ul/td/img/br/span/b)
		# so that only cell text survives for the XPath extraction below.
		strRe = re.compile(r"<(?!((/?s?li)|(/?s?ul)|(/?s?td)|(/?s?img)|(/?s?br)|(/?s?span)|(/?s?b)))[^>]+>")
		data = []
		index = 0
		for tr in tagHold.findAll('tr'):
			cells = tr.findAll('td')
			# Data rows always carry exactly 7 <td> cells; skip anything else.
			if cells is None or len(cells) != 7:
				continue
			index += 1
			if index == 1:
				# First matching row is the column-header row — skip it.
				continue
			strRow = strRe.sub('', str(tr))
			texts = Selector(text=strRow).xpath('//td/text()').extract()
			# Remove embedded whitespace control characters from each cell.
			# (Renamed the comprehension variable: it previously shadowed the
			# outer loop variable, which leaks scope in Python 2.)
			data.append([t.replace('\t', '').replace('\n', '').replace('\r', '') for t in texts])
		item['data'] = data

		return item