import logging
from twisted.internet import reactor
from scrapy.crawler import CrawlerRunner
from scrapy.utils.project import get_project_settings
from scrapy.utils.log import configure_logging
from scrapy.spiderloader import SpiderLoader
from datetime import date
import os
import functools
import requests

def just_one_instance(func):
    """
    Decorator: allow only one running instance of the wrapped function
    per host.

    It claims a well-known TCP port (60124); if the bind fails because
    another instance already holds it, the wrapped function is skipped.

    :param func: callable to guard.
    :return: wrapper that returns ``func``'s result, or ``None`` when
             another instance is already running.
    """
    @functools.wraps(func)
    def f(*args, **kwargs):
        import socket
        # Module-level reference keeps the socket alive (and the port
        # held) after this wrapper returns; a local would be destroyed
        # on exit and release the lock.
        global s
        try:
            s = socket.socket()
            host = socket.gethostname()
            s.bind((host, 60124))
        except OSError:
            # Narrowed from a bare ``except``: bind/hostname failures
            # (EADDRINUSE, socket.gaierror, ...) are all OSError
            # subclasses, and we no longer swallow KeyboardInterrupt.
            print('already has an instance')
            return None
        return func(*args, **kwargs)
    return f


@just_one_instance
def main():
    """
    Top up the proxy-IP pool from the mimvp API, then run every spider
    in the project (the crawl phase is currently disabled by the early
    ``exit(0)`` below).
    """
    from ipproxy.items import IpproxyItem
    from DBUtil import DBConn
    settings = get_project_settings()
    conn = DBConn(host=settings['HOST'], user=settings['USER'], passwd=settings['PASSWD'],
                  database=settings['DATABASE'])

    # Re-fill the pool when fewer than 100 unused "mipu" proxies remain.
    if conn.qv('select count(1) from t_agent_ip where use_count=0 and proxy_name="mipu"') < 100:
        r = requests.get('https://proxy.mimvp.com/api/fetch.php?orderid=860171021154748781&num=100&'
                         'country_group=1&http_type=1,2&anonymous=5&ping_time=0.3&result_fields=1,2,3,4,5,6,7,8,9')
        for line in r.text.split('\r\n'):
            row = line.split(',')
            # Skip blank/malformed rows — e.g. the trailing empty line
            # after the final CRLF, which used to raise IndexError on
            # row[0].split(':')[1].
            if len(row) < 9 or ':' not in row[0]:
                continue
            ip, port = row[0].split(':', 1)
            # NOTE(review): SQL built by string interpolation from data
            # returned by an external API — switch to parameterized
            # queries if DBConn supports them.
            if conn.qv('select count(1) from t_agent_ip where ip="%s" and port="%s"' % (ip, port)) == 0:
                item = IpproxyItem()
                item['ip'] = ip
                item['port'] = port
                item['anonymous'] = row[2]
                item['proxy_type'] = row[1]
                item['country'] = row[4]
                item['speed'] = float(row[5])
                item['checked_time'] = row[8].replace('\n', '')
                item['proxy_name'] = 'mipu'
                item['use_count'] = '0'
                conn.insert('t_agent_ip', [item])
    # NOTE(review): this early exit makes everything below unreachable —
    # presumably a temporary switch to disable the crawl phase. Delete it
    # to re-enable the spiders.
    exit(0)

    if not os.path.exists('./logs'):
        os.makedirs('./logs')

    configure_logging(install_root_handler=False)

    # One log file per day, UTF-8, appended across runs.
    logging.basicConfig(
        handlers=[logging.FileHandler('./logs/' + date.today().strftime('%Y_%m_%d') + '.log', 'a', 'utf-8')],
        format='%(asctime)s %(name)-32s %(levelname)-8s: %(message)s',
        level=logging.INFO
    )
    logger = logging.getLogger('Runner')
    logger.info('------------ Spider begin -----------')

    runner = CrawlerRunner(settings)

    # Schedule every spider the loader knows about. SpiderLoader.list()
    # and .load() are the public API (the original reached into the
    # private ``_spiders`` dict via __getattribute__).
    sl = SpiderLoader(settings)
    for spider_name in sl.list():
        runner.crawl(sl.load(spider_name))
    d = runner.join()
    # Stop the reactor once every crawl has finished (success or failure).
    d.addBoth(lambda _: reactor.stop())
    # blocks process so always keep as the last statement
    reactor.run()
    logger.info('------------ All finished. ------------')


# Script entry point: run the guarded main() (no-op if another
# instance already holds the lock port).
if __name__ == '__main__':
    main()
