# -*- coding: utf-8 -*-
import scrapy
from scrapy.linkextractors import LinkExtractor
from scrapy.spiders import CrawlSpider, Rule
from lxml import etree
import re
import os
import sys
# Python 2 only: reload(sys) re-exposes sys.setdefaultencoding (hidden at
# interpreter start-up) so the process-wide default str<->unicode codec can
# be forced to UTF-8 for the Chinese page titles handled below.
# NOTE(review): this is a well-known Py2 hack that can mask encoding bugs;
# it has no Python 3 equivalent.
reload(sys)   
sys.setdefaultencoding('utf8') 
class ZiliaoSpider(CrawlSpider):
    """Crawl xuexi111.com tutorial pages and harvest ed2k download links.

    Every crawled URL is appended to a crawl log; any ed2k:// links found
    in a page body are written to a per-page text file inside a folder
    named after the first path segment of the page URL.
    """
    name = 'ziliao'
    allowed_domains = ['xuexi111.com']
    start_urls = ['http://www.xuexi111.com/jiaocheng/index_1.html']

    # Root directory for the crawl log and the saved link files
    # (was hard-coded twice inside parse_item).
    data_dir = 'E:/jiaocheng/F/Project/xuexi111/data'

    # Each match runs from "ed2k:" to the next double quote (inclusive).
    # Compiled once here instead of on every parse_item() call.
    ed2k_re = re.compile(r'ed2k:.*?"')

    rules = (
        Rule(LinkExtractor(allow=r'http://www.xuexi111.com/jiaocheng/shipin/'), callback='parse_item', follow=True),
        Rule(LinkExtractor(allow=r'http://www.xuexi111.com/jiaocheng/index_'), callback='parse_item', follow=True),
        Rule(LinkExtractor(allow=r'http://www.xuexi111.com/'), callback='parse_item', follow=True),
             )

    def parse_item(self, response):
        """Log the crawled URL and save all ed2k links found on the page.

        :param response: scrapy Response for the crawled page.
        :returns: dict item with the page ``url``, ``title`` and the list
                  of extracted ``ed2k`` links (empty list when none).
        """
        item = {}
        html = response.body
        crawl_url = response.url

        # Append every visited URL to the crawl log. The context manager
        # closes the file; the original also called f.close() redundantly.
        with open(os.path.join(self.data_dir, 'crawl.txt'), 'a') as f:
            f.write(crawl_url + '\n')

        tree = etree.HTML(html)
        name = tree.xpath('/html/head/title/text()')
        print(name)

        # ed2k download link candidates (each ends with the closing quote).
        ed2ks = self.ed2k_re.findall(html)

        item['url'] = crawl_url
        item['title'] = str(name[0]) if name else ''
        item['ed2k'] = []

        if not ed2ks:
            print("HAVE_NO_DATABASES")
            return item

        # TXT file name: page title with '?' (illegal on Windows) replaced,
        # surrounding whitespace dropped.
        # BUG FIX: the original computed path.strip() and then discarded it
        # by re-assigning ipath from the unstripped value.
        path = str(name[0]).replace("?", '_').strip()
        # Folder named after the first path segment of the URL.
        path_name = crawl_url.split('/')[3]
        # GB2312-encoded file name for the Chinese Windows filesystem
        # (Python 2 bytes round-trip; no-op concern on other platforms).
        ipath = path.decode('utf-8').encode('gb2312')

        save_path = os.path.join(self.data_dir, path_name)
        if not os.path.exists(save_path):
            print("CREATE___[%s]____" % save_path)
            os.makedirs(save_path)
        else:
            print("WENJIANJIA_____[%s]___exists" % save_path)

        # Absolute output path instead of os.chdir(): mutating the
        # process-wide cwd from a spider callback is not safe.
        out_file = os.path.join(save_path, ipath + '.txt')

        # Step of 3 preserves the original sampling of the regex matches
        # (presumably skipping duplicate captures — TODO confirm).
        # BUG FIX: the loop variable no longer shadows the item dict; the
        # original reused `i`, so parse_item returned a loop index int.
        for idx in range(0, len(ed2ks), 3):
            line = ed2ks[idx][:-1]  # drop the trailing quote
            print(line)
            item['ed2k'].append(line)
            try:
                # `with` guarantees the handle is closed even if a write
                # fails (the original leaked it in that case).
                with open(out_file, 'ab') as f:
                    f.write("\n\n**************------------------------********************\n\n")
                    f.write('\n\n' + line + '\n\n')
            except IOError:
                print("SAVE___FAILE")
        return item
