#!/usr/bin/env python
# -*- coding:utf-8 -*-

import argparse

from fabric.api import *
from fabric.contrib.files import exists

# Remote base directory for Python applications on the target hosts.
BASE_PATH = '/usr/app/python/'
# Name of the archive built locally and shipped to every host.
TAR_FILE = 'sc-crawler-platform.tar.gz'
# Remote install directory for this project.
APP_PATH = BASE_PATH + 'sc-crawler-platform'
# Directory the worker scripts are launched from.
# NOTE(review): "EXCUTE" is a typo for "EXECUTE"; renaming would touch every
# reference, so it is documented rather than changed here.
EXCUTE_PATH = APP_PATH + '/example'
# Entry-point name; only referenced from commented-out code below.
APP_NAME = 'crawler_api.py'


def tar_codes():
    """Package the local working tree into TAR_FILE.

    Stale byte-compiled files are deleted first so they are not shipped
    to the servers.  Runs under warn_only so a failed cleanup (e.g. no
    .pyc files present) does not abort the deploy.
    """
    with settings(warn_only=True):
        # Recursively remove *.pyc everywhere.  The previous glob
        # "rm */*.pyc" only matched files exactly one directory deep,
        # leaving top-level and deeply nested bytecode in the tarball.
        local("find . -name '*.pyc' -delete", shell='/bin/bash')
        local("tar -zcf {fname} *".format(fname=TAR_FILE), shell='/bin/bash')

def remove_tar():
    """Delete the locally built TAR_FILE; ignore failure if it is gone."""
    with settings(warn_only=True):
        local("rm {0}".format(TAR_FILE), shell='/bin/bash')

@parallel(pool_size=4)
def deploy_app():
    """Push TAR_FILE to each host and unpack it under APP_PATH.

    The target directory is created on the first deploy and emptied on
    every subsequent one, so the unpacked tree always mirrors the
    freshly built tarball.
    """
    if not exists(APP_PATH):
        run('mkdir -p {0}'.format(APP_PATH))
    # Wipe the previous release; warn_only tolerates an already-empty dir.
    with settings(warn_only=True):
        sudo('rm -Rf {0}/*'.format(APP_PATH))
    put(TAR_FILE, '{0}/'.format(APP_PATH))
    with cd(APP_PATH):
        run('tar -zxf {0}'.format(TAR_FILE), shell='/bin/bash')
        # The remote copy of the tarball is no longer needed once unpacked.
        run('rm {0}'.format(TAR_FILE), shell='/bin/bash')

@parallel(pool_size=4)
def run_app():
    """Restart all crawler and storer workers in EXCUTE_PATH.

    Every running python process on the host is killed first, then each
    worker is launched detached (nohup, output discarded) with a one
    second pause between starts.
    """
    # NOTE(review): 'pkill -f python' matches ANY process whose command
    # line contains "python" -- confirm nothing unrelated runs on these hosts.
    with settings(warn_only=True):
        sudo('pkill -f ' + 'python')

    workers = [
        # crawlers
        ('recruitment_crawler.py', 'recruitment_crawler_job51'),
        ('recruitment_crawler.py', 'recruitment_crawler_lagou'),
        ('recruitment_crawler.py', 'recruitment_crawler_zhilian'),
        ('resume_crawler.py', 'resume_crawler_juxian'),
        # storers
        ('recruitment_storer.py', 'recruitment_store_lagou'),
        ('resume_storer.py', 'resume_store'),
    ]
    with cd(EXCUTE_PATH):
        for script, job in workers:
            run('$(nohup python {0} -e test {1} '
                '1>/dev/null 2>/dev/null&) && sleep 1'.format(script, job))

@parallel(pool_size=4)
def kill():
    """Best-effort stop of every python process on each host."""
    with settings(warn_only=True):
        run('pkill -f python')

@parallel(pool_size=4)
def install_package():
    """Install the project's Python dependencies on each host."""
    # NOTE(review): runs in the remote login directory -- assumes
    # requirements.txt exists there; verify before enabling in the flow.
    sudo('pip install -r requirements.txt')



@parallel(pool_size=10)
def test():
    """Smoke check: report whether the 'xtls' package is installed remotely."""
    check_cmd = 'pip list | grep xtls'
    run(check_cmd)


if __name__ == "__main__":
    """ deploy test ,

    just push file
    test path : /usr/app/python/crawler-api-test
    """
    env.hosts = [
        "54.223.17.90",
        "54.222.241.47",
        "54.222.166.223",
        "54.223.16.213",
        "54.223.24.7",
        "54.223.67.172",
        "54.222.243.217"
        #"xu.du@121.43.150.81",
        #"xu.du@115.29.198.28",
        #"xu.du@120.26.93.125",
        #"xu.du@120.26.93.104",
    ]
    env.user = "ubuntu"
    env.key_filename = "/home/scdev/sc.pem"
    # execute(kill)
    tar_codes()
    execute(deploy_app)
    execute(run_app)
    # execute(install_package)
    # remove_tar()
    #execute(install_package)


