#!/usr/bin/env python
# -*- coding:utf-8 -*-
import Queue
import os
import subprocess
import threading

import requests
import time

import schedule
import sys

reload(sys)
sys.setdefaultencoding('utf8')
# Tutorial reference: https://zhuanlan.zhihu.com/p/25978264
# This appends the current directory / a subdirectory to sys.path; it is not enough for the parent package.
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__),"..")))
# Use this to put the parent package directory on the path.
sys.path.insert(0,'../..')

from proxyips.mysqlmoudel.myusesql.spidersql import SpiderSql
from proxyips.mysqlmoudel.sqlbuilder import MysqlBuilder
from proxyips.mysqlmoudel.sqlfactory import Abssql

class clearIneffectualips():
    """Maintains proxy-ip rows in MySQL: fetch, lock, delete and flag them.

    NOTE(review): depends on project-local helpers (Abssql / MysqlBuilder /
    SpiderSql); connection settings are read from ../config/mysql.ini.
    """

    def __init__(self):
        # Build and open the MySQL connection from the ini configuration.
        mysql = Abssql.getsqlfractry().mysqlFractry("mysql")
        builder = MysqlBuilder('../config/mysql.ini', 'mysqllocalhost')
        moudle = builder.build_all().get_moudle().todict()
        print(moudle)
        mysql.link(moudle)
        self.mysql = mysql
        self.spidersql = SpiderSql()
        # Shared cursor/connection handles used by all methods below.
        self.cursor = mysql.cursor
        self.db = mysql.db

    def getlockips(self):
        """Return every proxy-ip row currently in 'lock' status."""
        sql = self.spidersql.getproxyipstatus("lock")
        print (sql)
        self.cursor.execute(sql)
        self.mysql.db.commit()
        # Fetch all rows of the result set.
        datas = self.cursor.fetchall()
        return datas

    def getNullips(self):
        """Return unvalidated proxy rows and mark each one 'lock'.

        Locking is needed so concurrent worker threads do not pick up the
        same row twice.
        """
        sql = self.spidersql.validationproxyip()
        print (sql)
        self.cursor.execute(sql)
        self.mysql.db.commit()
        # Fetch all rows of the result set.
        datas = self.cursor.fetchall()
        print ("******************")
        for data in datas:
            print (data)
            sql = self.spidersql.lockproxyip('lock', data[0])
            self.cursor.execute(sql)
            self.mysql.db.commit()
        print (self.cursor.rowcount)
        return datas

    def clearips(self):
        """Delete ineffectual proxy rows; roll back on any DB error."""
        try:
            sql = self.spidersql.deleteineffectualips()
            self.cursor.execute(sql)
            self.mysql.db.commit()
        except Exception:  # was a bare except: do not swallow SystemExit/KeyboardInterrupt
            print ("sql execute err,please check")
            self.mysql.db.rollback()
        else:
            print ("sql execute success")

    def setbaiduandamazon(self, tup):
        """Persist one (id, baidu_flag, amazon_flag) validation result."""
        id = tup[0]
        baidustatus = tup[1]
        amazonstatus = tup[2]

        sql = self.spidersql.setbaiducanuse(baidustatus, id)
        self.cursor.execute(sql)
        self.mysql.db.commit()

        sql = self.spidersql.setamazoncanuse(amazonstatus, id)
        self.cursor.execute(sql)
        self.mysql.db.commit()

class getBaiduOrAmazon(object):
    """Checks whether a proxy "ip:port" can reach Baidu and/or Amazon."""

    # Desktop-Chrome User-Agent shared by both probes.
    _HEAD = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.36'}

    @classmethod
    def getips(cls, data):
        """Validate one proxy row against both test sites.

        :param data: sequence where data[0] is the row id and data[1] is "ip:port"
        :return: (id, baidu_flag, amazon_flag) with 1/0 flags, or None when
                 the ip is missing
        """
        ip = data[1]
        id = str(data[0])
        if ip is None:
            # Nothing to probe without an address (str(data[0]) is never None).
            return None
        baidu = 1 if cls.getbaidu(ip) else 0
        amazon = 1 if cls.getamazon(ip) else 0
        return (id, baidu, amazon)

    @classmethod
    def getbaidu(cls, ip):
        """Return True when the proxy can fetch https://www.baidu.com/."""
        return cls._probe(ip, 'https://www.baidu.com/')

    @classmethod
    def getamazon(cls, ip):
        """Return True when the proxy can fetch https://www.amazon.com/."""
        return cls._probe(ip, 'https://www.amazon.com/')

    @classmethod
    def _probe(cls, ip, url):
        """Shared probe: fetch *url* through the proxy with a 10s timeout.

        Extracted from the former copy-pasted getbaidu/getamazon bodies.
        """
        try:
            request = requests.get(url,
                                   proxies={'http': 'http://' + ip, 'https': 'https://' + ip},
                                   headers=cls._HEAD, timeout=10)
            # Prefer the encoding sniffed from the body over the header value.
            request.encoding = request.apparent_encoding
            print ("代理请求成功")
            return True
        except requests.RequestException:  # was a bare except; catch network errors only
            print ("代理请求失败")
            return False

def main():
    """Main thread: collect unchecked proxies and validate them with 20 workers."""
    print ("开始验证ip")
    # One lock serializes DB writes; one queue feeds the worker threads.
    write_lock = threading.Lock()
    work_queue = Queue.Queue()
    cleaner = clearIneffectualips()
    rows = cleaner.getNullips()
    cleaner.clearips()
    print (rows)
    if rows:
        # Each queue item is (row id, "ip:port").
        for row in rows:
            proxy = row[1].encode("utf-8") + ":" + str(row[2])
            work_queue.put((row[0], proxy))

        for _ in range(20):
            worker = threading.Thread(target=yanzheng, args=(work_queue, write_lock, cleaner))
            worker.start()
    # Block until every queued item has been marked done by the workers.
    work_queue.join()

    print("结束")

def yanzheng(queue, lck, clearips):
    """Worker: pull (id, "ip:port") items off the queue and persist the result.

    Bug fixes:
    - task_done() is now called for EVERY item taken from the queue;
      previously it was skipped when validation returned None, leaving
      queue.join() in main() blocked forever.
    - get_nowait() replaces the empty()/get() pair, which could race between
      threads and block a worker on an already-drained queue.
    """
    while True:
        try:
            data = queue.get_nowait()
        except Queue.Empty:
            # Queue drained: this worker is finished.
            break
        try:
            tup = getBaiduOrAmazon.getips(data)
            if tup is not None:
                # Serialize DB writes across worker threads.
                lck.acquire()
                try:
                    clearips.setbaiduandamazon(tup)
                finally:
                    lck.release()
                print (data)
                print ("验证完成")
        finally:
            queue.task_done()

def job():
    """Scheduler entry point: log a start marker, then run one validation pass."""
    print ("start")
    main()

def getip():
    """Run the scrapy crawler that harvests fresh proxy ips.

    Uses subprocess with an argument list (shell=False) instead of an
    os.system shell string; stdout still lands in ../../getip.log, the
    same file the old `cd ../.. && ... > getip.log` redirect produced.
    """
    log = open("../../getip.log", "w")
    try:
        subprocess.call(["scrapy", "crawl", "getproxyips"], cwd="../..", stdout=log)
    finally:
        log.close()

if __name__ == "__main__":
    # Scheduled mode (disabled): would run the job daily at fixed times.
    # schedule.every().day.at("12:00").do(job)
    # schedule.every().day.at("10:00").do(job)
    # while True:
    #     schedule.run_pending()
    #     time.sleep(1)
    # Run a single validation pass immediately.
    job()

# def mythringstart():
#     clearips = clearIneffectualips()
#     clearips.clearips()
#     clearips.getips()
#
#
# if __name__ == "__main__":
#     # t = threading.Thread(target=mythringstart)
#     # t.start()
#     while(True):
#         mythringstart()

        