#coding:utf-8
import scrapy
import json
import unicodedata
import re
import computeDaysByNums
import time
import MySQLdb
import simplejson

class QuotesSpider(scrapy.Spider):
    """Scrape monthly commit and contributor counts from a project's
    activity-chart JSON endpoints and aggregate them into quarterly
    totals written to ``nums_per_quarter.json``.

    Run with ``-a tag=<project page URL> -a tid=<project id>``.
    """
    name = "nums_per_month"
    tid = 0        # project id, supplied on the command line via -a tid=...
    com = {}       # "YYYY-MM-DD" -> commits in that month
    con = {}       # "YYYY-MM-DD" -> contributors in that month
    comlete = 0    # (sic) 1 once the first of the two chart callbacks finished
    data = []      # accumulated quarterly result records

    def start_requests(self):
        """Entry point: fetch the project page given by the ``tag`` argument."""
        self.tid = getattr(self, 'tid', None)
        tag = getattr(self, 'tag', None)
        if tag is not None:
            yield scrapy.Request(tag, callback=self.parse)

    def parse(self, response):
        """Extract the two chart data URLs from the project page and fetch
        them; the commit chart is the 2nd element, contributors the 3rd."""
        code_url = response.css("div[class='chart watermark440']::attr(datasrc)").extract()
        yield scrapy.Request(code_url[1], self.get_commit_nums)
        yield scrapy.Request(code_url[2], self.get_contributors_num)

    def _series_to_monthly(self, response, limit):
        """Decode a chart JSON response into a ``{date: value}`` dict.

        The chart payload is ``{'series': [{'data': [[ts_ms, value], ...]}]}``
        with millisecond timestamps.  Only the trailing ``limit`` samples
        (roughly three years of monthly points) are kept.
        """
        payload = json.loads(response.body.decode(response.encoding))
        series = payload['series'][0]['data']
        if len(series) > limit:
            series = series[-limit:]
        monthly = {}
        for ts_ms, value in series:
            # Millisecond timestamp -> seconds, then format as a date string.
            day = time.strftime("%Y-%m-%d", time.localtime(ts_ms // 1000))
            monthly[day] = value
        return monthly

    def _maybe_finish(self):
        """Run the aggregation once BOTH chart callbacks have fired."""
        if self.comlete == 1:
            self.insert_into_commu()
        else:
            self.comlete = 1

    def get_commit_nums(self, response):
        """Callback for the commits-per-month chart JSON."""
        self.com = self._series_to_monthly(response, 36)
        self._maybe_finish()

    def get_contributors_num(self, response):
        """Callback for the contributors-per-month chart JSON."""
        self.con = self._series_to_monthly(response, 40)
        self._maybe_finish()

    def insert_into_commu(self):
        """Merge the monthly commit/contributor dicts into quarterly totals
        and dump the result list to ``nums_per_quarter.json``."""
        tid = int(self.tid)
        param = []
        for key in self.com:
            # A month missing from the contributor series counts as 0
            # instead of raising KeyError.
            param.append([tid, key, self.com[key], self.con.get(key, 0)])
        # Sort chronologically by the date string.
        param.sort(key=lambda row: row[1])
        for i in range(len(param)):
            month = int(param[i][1][5:7])
            # Months 1, 4, 7, 10 start a quarter.
            if (month + 2) % 3 == 0:
                # Only emit a quarter when all three months are present.
                if i + 2 < len(param):
                    # '//' keeps the quarter label an integer ("Q1", not "Q1.0")
                    # under Python 3's true division.
                    self.data.append({
                        'tid': tid,
                        'quarter': param[i][1][0:4] + 'Q' + str((month + 2) // 3),
                        'commit': param[i][2] + param[i + 1][2] + param[i + 2][2],
                        'contributor': param[i][3] + param[i + 1][3] + param[i + 2][3],
                    })
        # stdlib json (already imported) replaces the redundant simplejson call.
        json_object = json.dumps(self.data)
        self.save_to_file('nums_per_quarter.json', json_object)

    def save_to_file(self, file_name, contents):
        """Write ``contents`` to ``file_name``, overwriting any existing file.

        ``with`` guarantees the handle is closed even if the write fails.
        """
        with open(file_name, 'w') as fh:
            fh.write(contents)

