# -*- coding: utf-8 -*-
# import requests
import scrapy


class XicidailiSpider(scrapy.Spider):
    """Scrape free HTTP proxies from xicidaili.com.

    Crawls the first 10 pages of the high-anonymity ("nn") listing and
    records each ``host:port`` pair in ``proxy_list.txt`` (append mode).
    """

    name = 'xicidaili'
    allowed_domains = ['xicidaili.com']
    start_urls = ['http://xicidaili.com/']

    def start_requests(self):
        """Yield requests for pages 1-10 of the /nn/ proxy listing.

        Browser-like headers and a recorded session cookie are attached
        because the site blocks requests that look automated.
        NOTE(review): the hard-coded cookie/ETag values will expire;
        refresh them if the site starts returning 403/304.
        """
        headers = {
            'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
            'Accept-Encoding': 'gzip, deflate',
            'Accept-Language': 'zh-CN,zh;q=0.9',
            'Cache-Control': 'max-age=0',
            'Connection': 'keep-alive',
            'Host': 'www.xicidaili.com',
            'If-None-Match': 'W/"bdf8360c9f3c3c1f64ea412a211cd6b5"',
            'Upgrade-Insecure-Requests': '1',
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) '
                          'Chrome/68.0.3440.106 Safari/537.36 '
        }
        cookies = {
            "_free_proxy_session": "BAh7B0kiD3Nlc3Npb25faWQGOgZFVEkiJWQ1NGJlZDk0MTI3N2Q0NjFhMTIzYWZhMWY5MWU"
                                   "0MDRhBjsAVEkiEF9jc3JmX3Rva2VuBjsARkkiMXdXbmRMWUhzK2oyRVRRTTdpanRBRGVzWnBOSEh0"
                                   "S2RyVU9uUEdUQXdEN0U9BjsARg%3D%3D--18a3ec30fc929b0861d11671c7b55cf764ae4f03",
            "Hm_lvt_0cf76c77469e965d2957f0553e6ecf59": "1544508603,1544508996",
            "Hm_lpvt_0cf76c77469e965d2957f0553e6ecf59": "1544509532"

        }
        url_template = "https://www.xicidaili.com/nn/{}"
        for page in range(1, 11):
            yield scrapy.Request(
                url=url_template.format(page),
                headers=headers,
                cookies=cookies,
                callback=self.parse,
            )

    def parse(self, response):
        """Extract ``host:port`` proxies from the listing table.

        Appends every proxy found on the page to ``proxy_list.txt`` and
        returns them as a list of strings.
        """
        ip_list = []
        # Open the output file once for the whole page instead of once per row.
        with open("proxy_list.txt", 'a') as f:
            # The first two <tr> rows are table chrome (header/banner), so
            # slice them off rather than index with range(2, len(...)).
            for row in response.xpath('//tr')[2:]:
                host = row.xpath('td[2]/text()').extract_first()
                port = row.xpath('td[3]/text()').extract_first()
                ip = "{}:{}".format(host, port)
                # Optional liveness check, currently disabled.  If re-enabled,
                # catch requests.RequestException — never a bare `except:`.
                # try:
                #     requests.get("https://www.xicidaili.com/nn/",
                #                  proxies={'http': "http://" + ip})
                # except requests.RequestException:
                #     continue
                print(ip)
                f.write(ip + '\n')
                ip_list.append(ip)
        return ip_list
