#-*- coding:utf8 -*-
import os
import datetime
import urllib
import urllib2
import json
import hashlib
import math
import dns.resolver as dns
import web
import time
from app.models.site import Site
from app.models.node import Node
from app.models.quest import Quest
from app.models.traffic import Traffic
from app.models.site_node import SiteNode
from app.models.site_traffic import SiteTraffic
from app.helpers.nodes_helper import Helper
from lib.apicn import *
class Quests:
    """Background task runner for the CDN control panel (Python 2).

    Pulls pending jobs from the ``quests`` queue table and applies them:
    renders and pushes nginx vhost configs to edge nodes over their HTTP
    API, maintains site<->node associations, collects per-site traffic
    counters, health-checks nodes, and toggles DNS record status through
    the DNSPod-style API imported from ``lib.apicn``.

    NOTE(review): the model handles below are *class-level* singletons
    shared by every instance of this class. All SQL in this class is
    built with ``%`` string interpolation — safe only as long as ids
    come from trusted queue rows (SQL-injection risk otherwise).
    ``print`` statements are used as the de-facto logging mechanism.
    """
    Site        = Site()
    Node        = Node()
    Quest       = Quest()
    SiteNode    = SiteNode()
    Traffic     = Traffic()
    SiteTraffic = SiteTraffic()
    # Max rows fetched per query / tasks processed per run.
    limit       = 1000
    # Site task processing
    def sites(self,argc):
        """Process up to ``limit`` queued site tasks of type ``argc``.

        argc: 1 = verify CNAME, 2 = push config to nodes,
        3 = remove the site from its nodes.
        Returns False when the queue has no tasks of this type.
        """
        if self.Quest.count_by("type = '%s'" % argc) == 0:
            return False
        quests = self.Quest.get_many_by("type = '%s'" % argc,self.limit)
        for quest in quests:
            self._load_site(argc,quest['sid'],quest['id'])
    # Queue item processing for one site task
    def _load_site(self,argc,sid,qid):
        """Execute a single site task.

        sid: site id from the queue row; qid: queue row id.
        Returns False when the site is missing/inactive or the CNAME
        check fails; otherwise return value varies by branch.
        """
        if argc in [1,2]:
            # Validate the site: it must exist and be active (status = 1).
            site = self.Site.get_by("id = '%s' and status = 1" % sid)
            if site is None:
                return False
        # Verify the CNAME record, then mark the site as CNAME-confirmed.
        if argc == 1:
            if Helper.get_cname(site.domain) is False:
                return False
            return self.Site.update_by_cname(sid)
        # Push the site's nginx config to every node in its level group.
        elif argc == 2:
            self._sites_put_to_nodes(site,qid)
        # Remove the site from all of its nodes.
        elif argc == 3:
            # 1. Does the site have any node associations?
            if self.SiteNode.count_by("sid = '%s'" % sid) > 0:
                sql = 'select sites_node.sid,nodes.host,nodes.token from nodes join sites_node on nodes.id = sites_node.nid where sid = %s limit 0,%s' % (sid, self.limit)
                # NOTE(review): the loop variable reuses the name `site`,
                # shadowing it with joined sid/host/token rows.
                for site in self.Site.query(sql):
                    # 2. Delete the site's vhost config from that node.
                    self._delete_node({'host':site.host,'token':site.token,'sid':site.sid})
                # 3. Delete the site-node associations.
                print self.SiteNode.delete_by("sid ='%s'" % sid)
            # 4. Delete all queued tasks for this site.
            print self.Quest.delete_by("sid = '%s'" % sid)

    # Sync a site's config to every node in its level group
    def _sites_put_to_nodes(self,site,qid):
        """Push ``site``'s vhost config to each active node of its level,
        record the site-node association, then delete queue task ``qid``.

        NOTE(review): the queue task is deleted even when no active node
        exists for the site's level. Always returns None.
        """
        # 1. Any active nodes in this site's level group?
        if self.Node.count_by("level='%s' and status=1" % site.level) > 0:
            # Build the config dict consumed by _nginx_conf().
            conf = {
                    'id': site.id,
                    'hash': Helper.hash(site.id),
                    'domain': site.domain,
                    'host': site.host,
                    'port': site.port,
                    'subs': self.Site.get_subs(site.id)
                    }
            nodes = self.Node.get_many_by("level='%s' and status=1" % site.level,self.limit)
            for node in nodes:
                # 2. Push the vhost config to this node.
                print self._update_node(node,conf)
                # 3. Record the site-node association.
                print self.SiteNode.create(conf['id'],node.id)
        # 3. Delete the queue task.
        print self.Quest.delete(qid)

    # Update (create/replace) a site's vhost config on one node
    def _update_node(self,node,conf):
        """POST the rendered nginx config to the node's conf/update API.

        Returns the API's 'result' field, or False on any failure.
        NOTE(review): the bare except swallows every error — network
        timeouts, bad JSON, missing 'result' key — indistinguishably.
        """
        params = {'body': self._nginx_conf(conf)}
        params = urllib.urlencode(params)
        uri = "http://%s/ocdn/conf/update?token=%s&reload=yes&file=vhost/%s.conf" % (node.host,node.token,conf['hash'])
        try:
            return json.loads(urllib2.urlopen(uri,params,timeout = 6).read())['result']
        except:
            return False

    # Remove a site's vhost config from one node
    def _delete_node(self,node):
        """Call the node's conf/remove API for the site's vhost file.

        ``node`` is a dict with 'host', 'token' and 'sid' keys.
        Returns False when the API is unreachable; otherwise prints the
        API result and implicitly returns None.
        """
        uri = "http://%s/ocdn/conf/remove?token=%s&reload=yes&file=vhost/%s.conf" % (node['host'],node['token'],Helper.hash(node['sid']))
        api = Helper.get_node_api(uri)
        if api is None:
            return False
        print api['result']

    # Render the nginx vhost config for one site
    def _nginx_conf(self,conf):
        """Return the nginx vhost config text for ``conf``.

        The config defines an upstream named by the site hash, a port-80
        server with proxy caching, separate static/dynamic location
        blocks, and a /purge endpoint. The '#Cache for 30Minutes'
        comments inside the template are emitted verbatim (they disagree
        with the actual 3h/1d TTLs but are runtime output, not code).
        """
        data  = "upstream %s {\n" % conf['hash']
        data += "\tip_hash;\n"
        data += "\tserver %s:%s weight=1 max_fails=2 fail_timeout=30s;\n" % (conf['host'],conf['port'])
        data +="}\n\n"

        data +="server {\n"
        data +="\tlisten 80;\n"
        data +="\tserver_name %s %s;\n" % (conf['domain'], ' '.join(conf['subs']))
        data +="\tgzip on;\n\n"

        data +="\taccess_log /usr/local/opencdn/pipe/bandwidth.pipe bandwidth;\n\n"

        data +="\tif (-d $request_filename) {\n"
        data +="\t\trewrite ^/(.*)([^/])$ $scheme://$host/$1$2/ permanent;\n"
        data +="\t}\n\n"

        data +="\tlocation / {\n"
        data +="\t\tproxy_cache cache_one;\n"
        data +="\t\tproxy_cache_valid 200 304 3h; #Cache for 30Minutes\n"
        data +="\t\tproxy_cache_key $scheme$proxy_host$request_uri;\n"
        data +="\t\tproxy_redirect off;\n"
        data +="\t\tproxy_pass http://%s;\n" % conf['hash']
        data +="\t\tproxy_set_header Host $host;\n"
        data +="\t\tproxy_set_header X-Real-IP $remote_addr;\n"
        data +="\t\tproxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;\n"
        data +="\t\tproxy_set_header Accept-Encoding \"\";\n\n"

        data +="\t\t#Ignore Head\n"
        data +="\t\tproxy_ignore_headers Cache-Control;\n"
        data +="\t\tproxy_ignore_headers Expires;\n"
        data +="\t\tproxy_ignore_headers Set-Cookie;\n\n"

        data +="\t\t#Hide Head\n"
        data +="\t\tproxy_hide_header Cache-Control;\n"
        data +="\t\tproxy_hide_header Expires;\n"
        data +="\t\tproxy_hide_header Set-Cookie;\n"
        data +="\t\tproxy_hide_header X-Pingback;\n"
        data +="\t\tproxy_hide_header X-Powered-By;\n\n"

        data +="\t\tadd_header X-Cache \"$upstream_cache_status\";\n"
        data +="\t\texpires 30m;\n"
        data +="\t}\n\n"

        data +="\t#Cache For Common Static Files\n"
        data +="\tlocation ~ .*\.(ico|jpg|jpeg|bmp|gif|png|js|css)$ {\n"
        data +="\t\tproxy_cache cache_one;\n"
        data +="\t\tproxy_cache_valid 200 304 1d; #Cache for 30Minutes\n"
        data +="\t\tproxy_cache_key $scheme$proxy_host$request_uri;\n"
        data +="\t\tproxy_pass http://%s;\n" % conf['hash']
        data +="\t\tproxy_set_header Host $host;\n"
        data +="\t\tadd_header X-Cache \"$upstream_cache_status\";\n"
        data +="\t\texpires 10d;\n"
        data +="\t}\n\n"

        data +="\t#Not Cache\n"
        data +="\tlocation ~ .*\.(php|jsp|cgi|asp|aspx|flv|swf|xml|do|rar|zip|rmvb|mp3|doc|docx|xls|pdf|gz|tgz|rm|exe)?$ {\n"
        data +="\t\tproxy_pass http://%s;\n" % conf['hash']
        data +="\t\tproxy_set_header Host $host;\n"
        data +="\t\tproxy_redirect off;\n"
        data +="\t\tproxy_set_header X-Real-IP $remote_addr;\n"
        data +="\t\tadd_header X-Cache \"$upstream_cache_status\";\n"
        data +="\t}\n\n"

        data +="\t#Purge Cache\n"
        data +="\tlocation ~ /purge(/.*) {\n"
        data +="\t\tallow all;\n"
        data +="\t\tproxy_cache_purge cache_one $scheme$proxy_host$request_uri;\n"
        data +="\t\terror_page 405 =200 /purge$1;\n"
        data +="\t}\n"
        data +="}"
        return data

    # Node task processing
    def nodes(self,argc):
        """Process up to ``limit`` queued node tasks.

        The caller's argc is shifted by +3 to map onto queue task types
        4 (node added), 5 (node updated), 6 (node deleted).
        Returns False when the queue has no tasks of this type.
        """
        argc = argc + 3
        if self.Quest.count_by("type = '%s'" % argc) == 0:
            return False
        quests = self.Quest.get_many_by("type = '%s'" % argc, self.limit)
        for quest in quests:
            self._load_node(argc,quest['sid'],quest['id'])

    # Queue item processing for one node task
    def _load_node(self,argc,sid,qid):
        """Execute a single node task (argc 4/5/6, see nodes()).

        Here ``sid`` is actually a node id (the queue reuses its 'sid'
        column). NOTE(review): if argc is outside [4,5,6] the local
        `node` is never bound and the `node is None` check raises
        NameError — callers must only pass shifted types 4-6.
        """
        if argc in [4,5]:
            node = self.Node.get_by("id = '%s' and status =1" % sid)
        elif argc == 6:
            node = self.Node.get_by("id = '%s' and status =3" % sid)
        if node is None:
            return False
        # 1. Newly added node
        if argc == 4:
            ## 1. Fetch all CNAME-verified active sites of this node's level.
            if self.Site.count_by("level = '%s' and (is_cname = 1 and status =1)" % node.level) >0:
                sites = self.Site.get_many_by("level = '%s' and (is_cname = 1 and status =1)" % node.level, self.limit)
                for site in sites:
                    ## 2. Push the site to the node group.
                    ## NOTE(review): _sites_put_to_nodes already creates the
                    ## site-node association (and deletes qid) for *every*
                    ## node in the level, so the create() below likely
                    ## duplicates the association for this node.
                    print self._sites_put_to_nodes(site,qid)
                    ## 3. Record the site-node association.
                    print self.SiteNode.create(site.id,node.id)
            ## 4. Delete the queue task.
            print self.Quest.delete(qid)
        # 2. Node updated
        elif argc == 5:
            ## 1. Fetch the node's associated sites
            ## 2. Refresh all config on the node
            ## 3. Delete the finished task
            ## NOTE(review): unimplemented — currently only prints the node.
            print node
        # 3. Node deleted
        elif argc == 6:
            ## 1. Count the node's associated sites.
            sites = self.SiteNode.count_by("nid = '%s'" % sid)
            if sites:
                sites = self.SiteNode.get_many_by("nid = '%s'" % sid)
                for site in sites:
                    ## 2. Remove every associated site from this node.
                    print self._delete_node({'host':node.host,'token':node.token,'sid': site.sid})
                # 3. Delete the associations.
                print self.SiteNode.delete_by("nid = '%s'" % sid)
            ## 4. Delete the node record.
            print self.Node.delete(sid)
            ## 5. Delete the finished queue task.
            print self.Quest.delete(qid)

    # Traffic statistics dispatcher
    def traffics(self,argc):
        """Dispatch a traffic job: 1 = 10-minute collection,
        2 = hourly merge, 3 = daily total."""
        # 1. Collect traffic every ten minutes.
        if argc ==1:
            self._get_site_node_traffic()
        # 2. Merge traffic every hour.
        elif argc == 2:
            self._create_time_traffic()
        # 3. Total traffic once per day.
        elif argc == 3:
            self._total_site_traffic()

    # Collect traffic counters (run every ten minutes)
    def _get_site_node_traffic(self):
        """Poll each active site-node pair's stream API and store the
        returned totals as a Traffic row keyed by sid/nid."""
        # 1. Are there any active site-node associations?
        if self.SiteNode.count_by('status = 1'):
            # 2. Fetch all associated sites with their node host/token.
            sql = 'select * from nodes join sites_node on nodes.id=sites_node.nid where sites_node.status = 1 limit 0,%s' % self.limit
            for node in self.Site.query(sql):
                uri = 'http://%s/ocdn/stream?token=%s&domain=%s' % (node.host,node.token,Helper.hash(node.sid))
                # 3. Fetch this site's traffic data from the node.
                data = Helper.get_node_api(uri)
                if data is not None:
                    if data['result'] is True:
                        # 4. Write the totals into the traffic table.
                        save = dict({'sid':node['sid'],'nid':node['nid']},**data['data']['total'])
                        print self.Traffic.create(save)

    # Merge collected traffic (run hourly)
    def _create_time_traffic(self):
        """Roll status-2 Traffic rows into SiteTraffic, then flip the
        status flag on the merged SiteTraffic rows."""
        # 1. Fetch traffic rows awaiting merge.
        if self.Traffic.count_by('status = 2'):
            for site in self.Traffic.get_many_by('status =2', self.limit):
                print self.SiteTraffic.create(site.id,dict(site))
        # 2. Update the temporary traffic rows' status.
        if self.SiteTraffic.count_by('status = 2'):
            for site in self.SiteTraffic.get_many_by('status = 2', self.limit):
                print self.SiteTraffic.update_by_status(site.id)

    # Total today's traffic (run daily)
    def _total_site_traffic(self):
        """Fetch today's status-1 SiteTraffic rows.

        NOTE(review): aggregation is unimplemented — the rows are only
        printed, nothing is summed or persisted.
        """
        end = time.strftime("%Y-%m-%d 00:00:00",time.localtime())
        if self.SiteTraffic.count_by("created_at > '%s' and status = 1" % end):
            for traffic in self.SiteTraffic.get_many_by("created_at > '%s' and status = 1" % end, self.limit):
                print traffic

    def check(self,argc):
        """Dispatch a health check: 1 = nodes, 2 = sites, 3 = reserved
        (currently a no-op)."""
        if argc == 1:
            self._check_nodes()
        elif argc == 2:
            self._check_sites()
        elif argc == 3:
            pass

    # Check the status of every node
    def _check_nodes(self):
        """Probe every active node; after 3 failed retries (10s apart)
        mark the node down and disable its DNS record."""
        # 1. Any active nodes at all?
        if self.Node.count_by('status = 1'):
            # 2. Fetch all active nodes.
            for node in self.Node.get_many_by('status = 1', self.limit):
                # 3. Is the node responding?
                if Helper.node_check(node.host,node.token) is False:
                    # 4. On failure, re-check up to 3 times.
                    # NOTE(review): `i` mirrors the xrange counter and is
                    # redundant; i == 3 only on the final iteration.
                    i = 0
                    for t in xrange(0,3):
                        # 5. Wait 10s between re-checks.
                        time.sleep(10)
                        if Helper.node_check(node.host,node.token) is False:
                            i = i + 1
                            # 6. After 3 failures, mark the node down.
                            if i == 3:
                                print self.Node.update_by_status(node.id)
                                print self._dns_status(node.host,0)
                else:
                    print '1'

    # Check the status of every site
    def _check_sites(self):
        """Probe each active site-node pair's 3-minute stream stats and
        print '1' (reachable) or '0' (unreachable/malformed)."""
        # 1. Fetch all active site-node associations.
        if self.SiteNode.count_by('status = 1'):
            sql = 'select * from nodes join sites_node on nodes.id=sites_node.nid where sites_node.status = 1 limit 0,%s' % self.limit
            for node in self.Site.query(sql):
                uri = 'http://%s/ocdn/stream?token=%s&domain=%s&min=3' % (node.host,node.token,Helper.hash(node.sid))
                # 3. Fetch this site's traffic data from the node.
                data = Helper.get_node_api(uri)
                try:
                    # NOTE(review): this bare expression has no effect; the
                    # try/except only verifies the nested keys exist (it
                    # also raises TypeError when data is None, caught here).
                    data['data']['total']['total_count'] > 0
                except:
                    print '0'
                else:
                    print '1'

    # Toggle a node's DNS record status
    def _dns_status(self,host,status):
        """Enable (status=1) or disable (status=0) the DNS A-record whose
        value equals ``host`` via the apicn Record API.

        Returns True when a matching record was toggled, False otherwise.
        NOTE(review): email/key/domain_id are hard-coded empty and must
        be filled in before this can work.
        """
        email       = ''
        key         = ''
        domain_id   = ''
        # Fetch all resolution records for this domain id.
        api = RecordList(domain_id, email = email, password = key)
        for records in api().get("records"):
            # Match the record whose resolved IP equals the target host.
            if records['value'] == host:
                if records['enabled'] == ['1','0'][status]:
                    # NOTE(review): `enabled` is assigned but never used.
                    enabled = ['disable','enable']
                    data = RecordStatus(['disable','enable'][status], record_id = records['id'], domain_id = domain_id, email = email , password = key)
                    try:
                        # NOTE(review): the comparison result is discarded —
                        # True is returned whenever data() does not raise,
                        # regardless of the API status code.
                        data()['status']['code'] == 1
                    except:
                        return False
                    return True
        return False
