#coding=utf-8
# Python 2 script: scrapes a stock-index quote from cn.investing.com with
# Scrapy and copies a formatted Chinese summary to the Windows clipboard.
import scrapy
from scrapy.crawler import CrawlerProcess

import datetime
import os
import re
import sys
# Python 2 hack: re-expose setdefaultencoding and force UTF-8 as the
# implicit str<->unicode codec so the Chinese literals below concatenate
# without explicit .decode()/.encode() calls.
reload(sys)
sys.setdefaultencoding('utf-8')

import traceback
import json
# pywin32 clipboard bindings (Windows only).
import win32clipboard as w
import win32con
def setText(aString):
    """Copy *aString* to the Windows clipboard as CF_UNICODETEXT.

    The clipboard is a system-wide resource: it is always closed again in a
    ``finally`` block so a failure in EmptyClipboard/SetClipboardData cannot
    leave it locked and block every other application.
    """
    w.OpenClipboard()
    try:
        w.EmptyClipboard()
        w.SetClipboardData(win32con.CF_UNICODETEXT, aString)
    finally:
        w.CloseClipboard()

class IndexSpider(scrapy.Spider):
    name = "index"
    root_dir = "index/"
    output_dir = r"C:\Users\Administrator\Desktop\index.txt"
    base_url = "https://cn.investing.com/indices/major-indices"
    word_dict = {
        "01": "跌幅",
        "02": "下跌",
        "03": "涨幅",
        "04": "上涨",
    }

    def start_requests(self):
        urls = [
            self.base_url,
        ]
        for url in urls:
            yield scrapy.Request(url=url, callback=self.parse_index)

    def parse_index(self, response):
        # row_dict = self.extract_line(response, "179")
        row_dict = self.extract_line(response, "40820")
        print json.dumps(row_dict, ensure_ascii=False)
        text = row_dict["index_name"]+str(datetime.datetime.now().strftime('%m月%d日'))+self.word_dict[self.rise_down(row_dict["net_change"])]+row_dict["net_change"][1:]+"，"+self.word_dict[self.rise_down(row_dict["net_change_percent"])]+row_dict["net_change_percent"][1:]+"，收盘"+row_dict["last_price"].replace(",", "")+"。"
        print text
        setText(text)
        w.OpenClipboard()
        print w.GetClipboardData(w.CF_UNICODETEXT)

    def extract_line(self, response, id):
        row_dict = {}
        tbody = response.css("#cr_12>tbody")
        row = tbody.css("tr[id=pair_"+id+"]")
        index_name = row.css("td>a::text").extract_first()
        last_price = row.css("td[class=pid-"+id+"-last]::text").extract_first()
        net_change = row.css("td[class~=pid-"+id+"-pc]::text").extract_first()
        net_change_percent = row.css("td[class~=pid-"+id+"-pcp]::text").extract_first()
        row_dict["index_name"] = index_name
        row_dict["last_price"] = last_price
        row_dict["net_change"] = net_change
        row_dict["net_change_percent"] = net_change_percent
        return row_dict
    
    def rise_down(self, string):
        ret_code = "00"
        if string[0] == "-":
            if string[-1] == "%":
                ret_code = "01"
            else:
                ret_code = "02"
        else: 
            if string[-1] == "%":
                ret_code = "03"
            else:
                ret_code = "04"
        return ret_code

# process = CrawlerProcess({
#     'USER_AGENT': 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1)'
# })

# process.crawl(IndexSpider)
# process.start() # the script will block here until the crawling is finished
