# -*- encoding:utf-8 -*-
#!/usr/bin/env python
# @Date: 2018-10-25 9:44:12
# @Author: FuXueshuang(fuxueshuang@antiy.cn)

# 安全分析 (Security analysis module)

import json
import os
import sys

from flask import Blueprint, g, request

from config import config
from lib import auth
from lib.table import table
from lib.validform import V, ValidateForm

# Blueprint name is derived from the module path so it stays unique app-wide.
app = Blueprint(__name__ + "_app", __name__)

# Python 2-only hack: re-expose setdefaultencoding (hidden by site.py) and
# force UTF-8 so implicit str/unicode conversions of Chinese text succeed.
# NOTE(review): fragile and unnecessary on Python 3 — drop when porting.
reload(sys)
sys.setdefaultencoding('utf8')


def getEsType():
    """Fetch the enabled log types from h_log_type.

    Returns a dict of two parallel arrays:
      'en_type_name' -- English type identifiers (e.g. 'WF', 'IDS', ...)
      'type_name'    -- localized display names
    Types flagged 'visible' and the 'DEV-AUDIT' type are excluded.
    """
    g.cursor.execute("""select array_agg(en_type_name) as en_type_name, array_agg(type_name) as type_name
                        from h_log_type
                        where log_type not in ('visible') and en_type_name not in ('DEV-AUDIT')""")
    row = g.cursor.fetchone()
    return dict(en_type_name=row['en_type_name'], type_name=row['type_name'])


# Parse the supported log types (originally "9 kinds", now 13) from ES hits.
def makeLog(esData, es_types=None):
    """Flatten Elasticsearch hits into per-type log records.

    Only hits whose ``_type`` is among the enabled types are kept.  Each
    record carries a common header (log_id, log_type, ts, log_level,
    sys_name, sys_id) plus the fields specific to its type.

    :param esData: iterable of ES hit dicts, each with '_type' and '_source'
    :param es_types: optional list of allowed type names; when None the
        'en_type_name' array from getEsType() is used.  The lookup is done
        ONCE here -- the original re-queried the database for every hit.
    :return: dict {'data': [record, ...]}
    """
    if es_types is None:
        es_types = getEsType()['en_type_name']
    dataInfo = []
    for log in esData:
        log_type = log.get('_type', '')
        if log_type not in es_types:
            continue
        src = log['_source']
        data = {
            'log_id': src.get('id', ''),        # unique log identifier
            'log_type': log_type,               # log category
            'ts': src.get('ts', ''),            # time the log was produced
            'log_level': src.get('level', ''),  # severity level
            'sys_name': src['sys_info']['name'] if 'sys_info' in src else '',  # system name
            'sys_id': src.get('node', ''),      # device identifier
        }
        # ------------------------- security monitoring -------------------------
        if log_type == 'WF':  # firewall
            data['raw_msg'] = src.get('raw_msg', '')           # raw packet
            data['sip'] = src.get('sip', '')                   # source ip
            data['sport'] = src.get('sport', '')               # source port
            data['dip'] = src.get('dip', '')                   # destination ip
            data['dport'] = src.get('dport', '')               # destination port
            data['response_action'] = src.get('action', '')    # response action
            data['proto'] = src.get('proto', '')               # network protocol
        elif log_type == 'WAF':  # web application firewall
            data['raw_msg'] = src.get('raw_msg', '')
            data['sip'] = src.get('sip', '')
            data['sport'] = src.get('sport', '')
            data['dip'] = src.get('dip', '')
            data['dport'] = src.get('dport', '')
            data['response_action'] = src.get('action', '')
            data['url'] = src.get('url', '')
            data['tag'] = ','.join(src.get('tag', ''))
        elif log_type == 'IDS':  # intrusion detection
            data['raw_msg'] = src.get('raw_msg', '')
            data['sip'] = src.get('sip', '')
            data['sport'] = src.get('sport', '')
            data['dip'] = src.get('dip', '')
            data['dport'] = src.get('dport', '')
            data['proto'] = src.get('proto', '')
            data['event_desc'] = src.get('desc', '')           # intrusion event description
        elif log_type == 'IPS':  # intrusion prevention
            data['raw_msg'] = src.get('raw_msg', '')
            data['sip'] = src.get('sip', '')
            data['sport'] = src.get('sport', '')
            data['dip'] = src.get('dip', '')
            data['dport'] = src.get('dport', '')
            data['event_desc'] = src.get('desc', '')
            data['proto'] = src.get('proto', '')
            data['response_action'] = src.get('action', '')
        elif log_type == 'MALWARE':  # malicious-code detection
            data['md5s'] = src.get('md5', '')
            data['behavior_name_level'] = ','.join(src.get('behavior', ''))  # behavior name & level
            data['malname'] = src.get('malname', '')           # malware name
            data['virus_type'] = src.get('virus_type', '')     # threat type
            data['virus_family'] = src.get('virus_family', '') # malware family
            data['virus_behavior'] = ','.join(src.get('behavior', ''))  # malicious behaviors
        elif log_type == 'C2':  # command & control
            data['raw_msg'] = src.get('raw_msg', '')
            data['dip'] = src.get('dip', '')
            data['dport'] = src.get('port', '')                # NOTE: source field is 'port', not 'dport'
            data['proto'] = src.get('proto', '')
            data['url'] = src.get('url', '')
            data['md5s'] = src.get('md5', '')
            data['control_content'] = src.get('msg', '')       # remote-control command content
        elif log_type == 'ILLEGAL-ACCESSS':  # illegal outbound connection
            data['response_action'] = src.get('action', '')
            data['md5s'] = src.get('md5', '')
            data['url'] = src.get('url', '')
        elif log_type == 'ILLEGAL-USE':  # illegal device access
            data['response_action'] = src.get('action', '')
            data['md5s'] = src.get('md5', '')
            data['dev_name'] = src.get('dev_name', '')         # accessing device name
            data['dev_id'] = src.get('dev_id', '')             # accessing device id
            data['action'] = src.get('op', '')                 # operation performed
        elif log_type == 'OS_VUL':  # operating-system vulnerability
            data['cve_id'] = src.get('cve_id', '')
            data['vul_name'] = src.get('vul_name', '')
            data['vul_type'] = src.get('vul_type', '')
            data['disposal_state'] = src.get('status', '')     # disposal status
        elif log_type == 'WEB_VUL':  # web-application vulnerability
            data['url'] = src.get('url', '')
            data['vul_name'] = src.get('vul_name', '')
            data['vul_type'] = src.get('vul_type', '')
            data['disposal_state'] = src.get('status', '')
        # ------------------------- on-demand monitoring -------------------------
        elif log_type == 'VUL':  # vulnerability scan
            data['cve_id'] = src.get('cve_id', '')
            data['vul_name'] = src.get('vul_name', '')
            data['vul_type'] = src.get('vul_type', '')
            data['disposal_state'] = src.get('status', '')
        elif log_type == 'LOAD_ANA':  # payload deep analysis
            # Bug fix: the original did src['file'].get('md5', ...) and
            # raised KeyError whenever the 'file' object was absent.
            data['md5s'] = src.get('file', {}).get('md5', '')
            data['malname'] = src.get('malname', '')
        elif log_type == 'MALCODE_TRANSTER':  # malicious-code transfer
            data['sip'] = src.get('sip', '')
            data['dip'] = src.get('dip', '')
            data['proto'] = src.get('proto', '')
            data['malname'] = src.get('malname', '')
        dataInfo.append(data)
    return {'data': dataInfo}


# Auto analysis -- attack statistics panel (自动分析--获取攻击统计区)
@app.route('/api/analyze/autoAnalyze/getAttackStatistical', methods=['GET'])
@auth.permission("analyze", "readOnly")
def analyze_autoAnalyze_getAttackStatistical():
    """Statistics panel for the auto-analysis page.

    Returns JSON {'status', 'data'} where data.dataSource holds:
      * waves               -- per-day auto-analysed risk counts (last 15 days)
      * ruleStatistical     -- event counts per matched rule keyword type
      * analysisStatistical -- events finished vs. underway
      * total               -- total auto-analysed attack events
    On any exception a {'status': 'fail'} JSON payload carrying the error
    text is returned instead of an HTTP error status.
    """
    try:
        # Placeholder payload shape; every section is overwritten from the DB.
        data = {
            "dataSource": {
                "waves": [
                    {
                        "r_time": "2019-02-19",
                        "all_risk_cnt": 3,
                        "analysis_risk_cnt": 2,
                        "send_report_view": "belong_view"
                    }
                ],
                "ruleStatistical": {
                    "recurring": "1",
                    "untreated": "2",
                    "behavior": "3",
                    "type": "4",
                    "other": "5"
                },
                "analysisStatistical": {
                    "underway": 5,
                    "finish": 2
                },
                "total": 1
            }
        }
        # Wave chart: recursive date series (15 days) left-joined against
        # h_autoanalysis_data so days with no data appear as zero counts.
        g.cursor.execute("""select case when b.all_risk_cnt is null then 0 else b.all_risk_cnt end as all_risk_cnt,
                                case when b.analysis_risk_cnt is null then 0 else b.analysis_risk_cnt end as analysis_risk_cnt,
                                a.r_time
                            from (
                                WITH RECURSIVE t(r_time) AS (
                                    SELECT date_trunc('hour',CURRENT_TIMESTAMP-interval '1 day')
                                  UNION ALL
                                    SELECT r_time-interval '1 day' FROM t
                                )
                                SELECT substr(d2t(r_time),1,10) as r_time
                                FROM t
                                LIMIT 15
                            )a
                            left join (
                                select autoanalysis_cnt as all_risk_cnt,analysised_cnt as analysis_risk_cnt,substr(d2t(static_date),1,10) as r_time
                                from h_autoanalysis_data
                                order by substr(d2t(static_date),1,10) desc
                                limit 15
                            )b
                            on a.r_time=b.r_time
                            order by a.r_time asc
                        """)
        data['dataSource']['waves'] = g.cursor.fetchall()
        # Rule statistics: one row of counts, bucketed by key_word_type
        # (types '3'/'4' are intentionally commented out in the SQL).
        g.cursor.execute("""select coalesce(sum(case when a.key_word_type='1' then 1 else 0 end), 0) as recurring,
                                coalesce(sum(case when a.key_word_type='2' then 1 else 0 end), 0) as untreated,
                                -- coalesce(sum(case when a.key_word_type='3' then 1 else 0 end), 0) as behavior,
                                -- coalesce(sum(case when a.key_word_type='4' then 1 else 0 end), 0) as type,
                                coalesce(sum(case when a.key_word_type='5' then 1 else 0 end), 0) as other
                            from (
                                 select a.event_id as attack_event_id,d.auto_analysis_result as rules,a.event_name as attack_event_name,d2t(a.start_time) as start_time,
                                    a.victim_unit,a.info_sys_name as victim_system,
                                    case when a.threaten_level='1' then '低危'
                                        when a.threaten_level='2' then '中危'
                                        when a.threaten_level='3' then '高危'
                                        when a.threaten_level='4' then '严重' end as threaten_level,
                                    a.handle_status,coalesce(b.analysis_count, 0) as analyze_count,
                                    case when c.analysis_id is null then '/'
                                        else c.is_risk_event end as conclusion,
                                    case when c.analysis_id is null then '1'
                                        else c.analysis_state end as analyze_state,
                                    d.key_word_type
                                from (
                                    select a.event_id, a.event_name, a.start_time,       -- 攻击事件
                                        c.victim_unit, d.info_sys_name, a.threaten_level, a.handle_status
                                    from h_attack_eventinfo a
                                    left join h_event_victimperson b
                                    on a.event_id=b.event_id
                                    left join h_victim_info c
                                    on c.assert_id=b.assert_id and b.sys_id = c.sys_id
                                    left join h_information_system d
                                    on c.sys_id=d.info_sys_id
                                    group by a.event_id, a.event_name, a.start_time,
                                        c.victim_unit, d.info_sys_name, a.threaten_level, a.handle_status
                                )a
                                left join (
                                    select max(a.analysis_id) as analysis_id,    -- 分析
                                        count(distinct case when a.analysis_state='1' then a.analysis_id end) as analysis_count, a.event_id
                                    from h_analysis_info a
                                    group by a.event_id
                                )b
                                on a.event_id=b.event_id
                                left join (
                                    select a.analysis_id,a.analysis_state,b.is_risk_event    -- 分析结论
                                    from h_analysis_info a
                                    left join h_analysis_conclusion b
                                    on b.analysis_id=a.analysis_id
                                ) c
                                on b.analysis_id=c.analysis_id
                                left join (
                                    select max(b.rule_id) as rule_id, a.event_id, b.auto_analysis_result, c.rule_item, c.key_word_type    -- 规则
                                    from h_attack_beacon a
                                    left join h_auto_analysis_result b
                                    on a.beacon_id=b.beacon_id
                                    left join h_auto_analysis_rule c
                                    on c.rule_id=b.rule_id
                                    where c.state='1'
                                    group by a.event_id, b.auto_analysis_result, c.rule_item, c.key_word_type
                                )d
                                on a.event_id=d.event_id
                                where d.rule_id is not null
                            )a
                        """)
        data['dataSource']['ruleStatistical'] = g.cursor.fetchone()
        # Finished vs. underway analysis counts plus the overall event total.
        # An event counts as 'finish' when it has at least one analysis.
        g.cursor.execute("""select coalesce(sum(case when a.analyze_count>0 then 1 else 0 end), 0) as finish,
                                coalesce(sum(case when a.analyze_count=0 then 1 else 0 end), 0) as underway,
                                coalesce(count(a.attack_event_id), 0) as total
                            from (
                                 select a.event_id as attack_event_id,a.event_name as attack_event_name,d2t(a.start_time) as start_time,
                                    a.victim_unit,a.info_sys_name as victim_system,a.threaten_level,
                                    a.handle_status,coalesce(b.analysis_count, 0) as analyze_count,
                                    case when c.analysis_id is null then '/'
                                        else c.is_risk_event end as conclusion,
                                    case when c.analysis_id is null then '1'
                                        else c.analysis_state end as analyze_state
                                from (
                                    select a.event_id, a.event_name, a.start_time,       -- 攻击事件
                                        c.victim_unit, d.info_sys_name, a.threaten_level, a.handle_status
                                    from h_attack_eventinfo a
                                    left join h_event_victimperson b
                                    on a.event_id=b.event_id
                                    left join h_victim_info c
                                    on c.assert_id=b.assert_id and b.sys_id = c.sys_id
                                    left join h_information_system d
                                    on c.sys_id=d.info_sys_id
                                    group by a.event_id, a.event_name, a.start_time,
                                        c.victim_unit, d.info_sys_name, a.threaten_level, a.handle_status
                                )a
                                left join (
                                    select max(a.analysis_id) as analysis_id,    -- 分析
                                        count(distinct case when a.analysis_state='1' then a.analysis_id end) as analysis_count, a.event_id
                                    from h_analysis_info a
                                    group by a.event_id
                                )b
                                on a.event_id=b.event_id
                                left join (
                                    select a.analysis_id,a.analysis_state,    -- 分析结论
                                        case when b.is_risk_event='0' then '未生成风险事项'
                                            when b.is_risk_event='1' then '生成风险事项'
                                            else b.is_risk_event end as is_risk_event
                                    from h_analysis_info a
                                    left join h_analysis_conclusion b
                                    on b.analysis_id=a.analysis_id
                                ) c
                                on b.analysis_id=c.analysis_id
                                left join (
                                    select max(b.rule_id) as rule_id, a.event_id, string_agg(distinct c.rule_item, ',') as auto_analysis_result    -- 规则
                                    from h_attack_beacon a
                                    left join h_auto_analysis_result b
                                    on a.beacon_id=b.beacon_id
                                    left join h_auto_analysis_rule c
                                    on b.rule_id=c.rule_id
                                    where c.state='1'
                                    group by a.event_id
                                )d
                                on a.event_id=d.event_id
                                where d.rule_id is not null
                            )a
                        """)
        data['dataSource']['analysisStatistical'] = g.cursor.fetchone()
        data['dataSource']['total'] = data['dataSource']['analysisStatistical']['total']
        return json.dumps({'status': 'success', 'data': data})
    except Exception, e:  # Python 2 syntax; broad catch keeps responses JSON-only
        return json.dumps({"status": "fail", "msg": "查询失败", "e": str(e)})


# Auto analysis -- attack event table data (自动分析--获取攻击表格)
@app.route('/api/analyze/autoAnalyze/getAttackTableDate', methods=['POST'])
@auth.permission("analyze", "readOnly")
def analyze_autoAnalyze_getAttackTableDate():
    """Paged table of auto-analysed attack events.

    Filter / sort / paging parameters come from the request via lib.table.
    Returns JSON {'status', 'data'} with data.total (row count matching
    the filter) and data.dataSource (the current page of rows).  On any
    exception a {'status': 'fail'} payload with the error text is returned.
    """
    try:
        # Example of the response shape kept for reference:
        # data = {
        #     "total": 5,
        #     "dataSource": [
        #         {
        #             "attack_event_id": 1,
        #             "rules": "反复出现、长期未处置",
        #             "attack_event_name": '江西省信息中心遭受C&C攻击',
        #             "start_time": "2019-02-20 14:07:16",
        #             "victim_unit": "某组织",
        #             "victim_system": "受害系统",
        #             "threaten_level": "轻微",
        #             "handle_status": "威胁消除",
        #             "conclusion": "生成风险事项",
        #             "analyze_count": 3,
        #             "analyze_state": '0'
        #         }
        #     ]
        # }
        data = {
            "total": 0,
            "dataSource": []
        }
        # table() reads filter/sort/paging state from the current request.
        tb = table()
        # NOTE(review): tb.where()/orderBy()/offset()/limit() are interpolated
        # into the SQL text with %s -- confirm lib.table escapes user input,
        # otherwise these queries are injectable.
        # First query: total row count under the same filter as the page query.
        g.cursor.execute("""select count(1) as total
                            from (
                                select a.event_id as attack_event_id,d.auto_analysis_result as rules,a.event_name as attack_event_name,d2t(a.start_time) as start_time,
                                    a.victim_unit,a.info_sys_name as victim_system,
                                    case when a.threaten_level='1' then '低 危'
                                        when a.threaten_level='2' then '中 危'
                                        when a.threaten_level='3' then '高 危'
                                        when a.threaten_level='4' then '严 重' end as threaten_level,
                                    case when a.handle_status='fix' then '已修复' else '未修复' end as handle_status,
                                    coalesce(b.analysis_count, 0) as analyze_count,
                                    case when c.analysis_id is null then '-'
                                        else c.is_risk_event end as conclusion,
                                    case when c.analysis_id is null then '1'
                                        else c.analysis_state end as analyze_state
                                from (
                                    select a.event_id, a.event_name, a.start_time,       -- 攻击事件
                                        c.victim_unit, d.info_sys_name, a.threaten_level, a.handle_status
                                    from h_attack_eventinfo a
                                    left join h_event_victimperson b
                                    on a.event_id=b.event_id
                                    left join h_victim_info c
                                    on c.assert_id=b.assert_id and b.sys_id = c.sys_id
                                    left join h_information_system d
                                    on c.sys_id=d.info_sys_id
                                    group by a.event_id, a.event_name, a.start_time,
                                        c.victim_unit, d.info_sys_name, a.threaten_level, a.handle_status
                                )a
                                left join (
                                    select max(a.analysis_id) as analysis_id,    -- 分析
                                        count(distinct a.analysis_id) as analysis_count, a.event_id
                                    from h_analysis_info a
                                    group by a.event_id
                                )b
                                on a.event_id=b.event_id
                                left join (
                                    select a.analysis_id,a.analysis_state,    -- 分析结论
                                        case when b.is_risk_event='0' then '未生成风险事项'
                                            when b.is_risk_event='1' then '生成风险事项'
                                            else '-' end as is_risk_event
                                    from h_analysis_info a
                                    left join h_analysis_conclusion b
                                    on b.analysis_id=a.analysis_id
                                ) c
                                on b.analysis_id=c.analysis_id
                                left join (
                                    select max(b.rule_id) as rule_id, a.event_id, string_agg(distinct c.rule_item, ',') as auto_analysis_result    -- 规则
                                    from h_attack_beacon a
                                    left join h_auto_analysis_result b
                                    on a.beacon_id=b.beacon_id
                                    left join h_auto_analysis_rule c
                                    on b.rule_id=c.rule_id
                                    where c.state='1'
                                    group by a.event_id
                                )d
                                on a.event_id=d.event_id
                                where d.rule_id is not null
                            ) a
                            %s
                """ % tb.where(['a.attack_event_name', 'a.victim_unit', 'a.victim_system']))
        data['total'] = g.cursor.fetchone()['total']
        # Second query: the current page of rows (same filter + sort/paging).
        g.cursor.execute("""select a.attack_event_id,a.rules,
                                a.attack_event_name,a.start_time,
                                a.victim_unit,a.victim_system, a.threaten_level,
                                a.handle_status,a.conclusion,a.analyze_count,a.analyze_state
                            from (
                                select a.event_id as attack_event_id,d.auto_analysis_result as rules,a.event_name as attack_event_name,d2t(a.start_time) as start_time,
                                    a.victim_unit,a.info_sys_name as victim_system,
                                    case when a.threaten_level='1' then '低 危'
                                        when a.threaten_level='2' then '中 危'
                                        when a.threaten_level='3' then '高 危'
                                        when a.threaten_level='4' then '严 重' end as threaten_level,
                                    case when a.handle_status='fix' then '已修复' else '未修复' end as handle_status,
                                    coalesce(b.analysis_count, 0) as analyze_count,
                                    case when c.analysis_id is null then '-'
                                        else c.is_risk_event end as conclusion,
                                    case when c.analysis_id is null then '1'
                                        else c.analysis_state end as analyze_state
                                from (
                                    select a.event_id, a.event_name, a.start_time,       -- 攻击事件
                                        c.victim_unit, d.info_sys_name, a.threaten_level, a.handle_status
                                    from h_attack_eventinfo a
                                    left join h_event_victimperson b
                                    on a.event_id=b.event_id
                                    left join h_victim_info c
                                    on c.assert_id=b.assert_id and b.sys_id = c.sys_id
                                    left join h_information_system d
                                    on c.sys_id=d.info_sys_id
                                    group by a.event_id, a.event_name, a.start_time,
                                        c.victim_unit, d.info_sys_name, a.threaten_level, a.handle_status
                                )a
                                left join (
                                    select max(a.analysis_id) as analysis_id,    -- 分析
                                        count(distinct case when a.analysis_state='1' then a.analysis_id end) as analysis_count, a.event_id
                                    from h_analysis_info a
                                    group by a.event_id
                                )b
                                on a.event_id=b.event_id
                                left join (
                                    select a.analysis_id,a.analysis_state,    -- 分析结论
                                        case when b.is_risk_event='0' then '未生成风险事项'
                                            when b.is_risk_event='1' then '生成风险事项'
                                            else '-' end as is_risk_event
                                    from h_analysis_info a
                                    left join h_analysis_conclusion b
                                    on b.analysis_id=a.analysis_id
                                ) c
                                on b.analysis_id=c.analysis_id
                                left join (
                                    select max(b.rule_id) as rule_id, a.event_id, string_agg(distinct c.rule_item, ',') as auto_analysis_result    -- 规则
                                    from h_attack_beacon a
                                    left join h_auto_analysis_result b
                                    on a.beacon_id=b.beacon_id
                                    left join h_auto_analysis_rule c
                                    on b.rule_id=c.rule_id
                                    where c.state='1'
                                    group by a.event_id
                                )d
                                on a.event_id=d.event_id
                                where d.rule_id is not null
                            )a
                            %s %s %s %s
                        """ % (tb.where(['a.attack_event_name', 'a.victim_unit', 'a.victim_system']), tb.orderBy(), tb.offset(), tb.limit()))
        data['dataSource'] = g.cursor.fetchall()
        return json.dumps({'status': 'success', 'data': data})
    except Exception, e:  # Python 2 syntax; broad catch keeps responses JSON-only
        return json.dumps({"status": "fail", "msg": "查询失败", "e": str(e)})


# Manual analysis -- attack statistics panel (人工分析--获取攻击统计区)
@app.route('/api/analyze/Analyze/getPerAttackStatistical', methods=['GET'])
@auth.permission("analyze", "readOnly")
def analyze_analyze_getPerAttackStatistical():
    try:
        data = {
            "dataSource": {
                "waves": [
                    {
                        "r_time": "2019-02-19",
                        "all_risk_cnt": 12,
                        "analysis_risk_cnt": 2,
                        "send_report_view": "belong_view"
                    }
                ],
                "risk_level": {
                    "level1": "1",
                    "level2": "2",
                    "level3": "3",
                    "level4": "4"
                },
                "analysisStatistical": {
                    "underway": 6,
                    "finish": 3
                },
                "total": 2
            }
        }
        g.cursor.execute("""select case when b.all_risk_cnt is null then 0 else b.all_risk_cnt end as all_risk_cnt,
                                case when b.analysis_risk_cnt is null then 0 else b.analysis_risk_cnt end as analysis_risk_cnt,
                                a.r_time
                            from (
                                WITH RECURSIVE t(r_time) AS (
                                    SELECT date_trunc('hour',CURRENT_TIMESTAMP-interval '1 day')
                                  UNION ALL
                                    SELECT r_time-interval '1 day' FROM t
                                )
                                SELECT substr(d2t(r_time),1,10) as r_time
                                FROM t
                                LIMIT 15
                            )a
                            left join (
                                select artificialanalysis_cnt as all_risk_cnt,analysised_cnt as analysis_risk_cnt,substr(d2t(static_date),1,10) as r_time
                                from h_artificialanalysis_data
                                order by substr(d2t(static_date),1,10) desc
                                limit 15
                            )b
                            on a.r_time=b.r_time
                            order by a.r_time asc
                        """)
        data['dataSource']['waves'] = g.cursor.fetchall()
        g.cursor.execute("""select coalesce(sum(case when a.threaten_level='1' then 1 else 0 end), 0) as level1,
                                coalesce(sum(case when a.threaten_level='2' then 1 else 0 end), 0) as level2,
                                coalesce(sum(case when a.threaten_level='3' then 1 else 0 end), 0) as level3,
                                coalesce(sum(case when a.threaten_level='4' then 1 else 0 end), 0) as level4
                            from (
                               select a.event_id as attack_event_id,a.event_name as attack_event_name,d2t(a.start_time) as start_time,
                                    a.victim_unit,a.info_sys_name as victim_system,a.threaten_level,
                                    a.handle_status
                                from (
                                    select a.event_id, a.event_name, a.start_time,       -- 攻击事件
                                        c.victim_unit, d.info_sys_name, a.threaten_level, a.handle_status
                                    from h_attack_eventinfo a
                                    left join h_event_victimperson b
                                    on a.event_id=b.event_id
                                    left join h_victim_info c
                                    on c.assert_id=b.assert_id and b.sys_id = c.sys_id
                                    left join h_information_system d
                                    on c.sys_id=d.info_sys_id
                                    group by a.event_id, a.event_name, a.start_time,
                                        c.victim_unit, d.info_sys_name, a.threaten_level, a.handle_status
                                )a
                            )a
                        """)
        data['dataSource']['risk_level'] = g.cursor.fetchone()
        g.cursor.execute("""select coalesce(sum(case when a.analyze_count>0 then 1 else 0 end), 0) as finish,
                                coalesce(sum(case when a.analyze_count=0 then 1 else 0 end), 0) as underway,
                                coalesce(count(a.attack_event_id), 0) as total
                            from (
                               select a.event_id as attack_event_id,a.event_name as attack_event_name,d2t(a.start_time) as start_time,
                                    a.victim_unit,a.info_sys_name as victim_system,
                                    case when a.threaten_level='1' then '低危'
                                        when a.threaten_level='2' then '中危'
                                        when a.threaten_level='3' then '高危'
                                        when a.threaten_level='4' then '严重' end as threaten_level,
                                    a.handle_status,coalesce(b.analysis_count, 0) as analyze_count,
                                    case when c.analysis_id is null then '-'
                                        else c.is_risk_event end as conclusion,
                                    case when c.analysis_id is null then '1'
                                        else c.analysis_state end as analyze_state
                                from (
                                    select a.event_id, a.event_name, a.start_time,       -- 攻击事件
                                        c.victim_unit, d.info_sys_name, a.threaten_level, a.handle_status
                                    from h_attack_eventinfo a
                                    left join h_event_victimperson b
                                    on a.event_id=b.event_id
                                    left join h_victim_info c
                                    on c.assert_id=b.assert_id and b.sys_id = c.sys_id
                                    left join h_information_system d
                                    on c.sys_id=d.info_sys_id
                                    group by a.event_id, a.event_name, a.start_time,
                                        c.victim_unit, d.info_sys_name, a.threaten_level, a.handle_status
                                )a
                                left join (
                                    select max(a.analysis_id) as analysis_id,    -- 分析
                                        count(distinct case when a.analysis_state='1' then a.analysis_id end) as analysis_count, a.event_id
                                    from h_analysis_info a
                                    group by a.event_id
                                )b
                                on a.event_id=b.event_id
                                left join (
                                    select a.analysis_id,a.analysis_state,b.is_risk_event    -- 分析结论
                                    from h_analysis_info a
                                    left join h_analysis_conclusion b
                                    on b.analysis_id=a.analysis_id
                                ) c
                                on b.analysis_id=c.analysis_id
                            )a
                        """)
        data['dataSource']['analysisStatistical'] = g.cursor.fetchone()
        data['dataSource']['total'] = data['dataSource']['analysisStatistical']['total']
        return json.dumps({'status': 'success', 'data': data})
    except Exception:
        return json.dumps({"status": "fail", "msg": "查询失败"})


# Manual analysis -- attack event table
@app.route('/api/analyze/analyze/getPerAttackTableDate', methods=['POST'])
@auth.permission("analyze", "readOnly")
def analyze_analyze_getPerAttackTableDate():
    """Return a filtered, paginated page of attack events for the table view.

    Two queries run over the same derived set (attack events joined to
    their latest analysis and its conclusion): the first yields the
    filtered ``total`` row count, the second the page of rows honouring
    the table helper's ORDER BY / OFFSET / LIMIT.

    Returns a JSON string with ``total`` and ``dataSource`` on success,
    or a fail payload carrying the exception text.
    """
    try:
        data = {
            "total": 0,
            "dataSource": []
        }
        tb = table()
        # NOTE(review): tb.where()/orderBy()/offset()/limit() output is
        # interpolated directly into the SQL text — assumed to be
        # sanitized inside lib.table; verify against that helper.
        g.cursor.execute("""select count(1) as total
                            from (
                                select a.event_id as attack_event_id,a.event_name as attack_event_name,d2t(a.start_time) as start_time,
                                    a.victim_unit,a.info_sys_name as victim_system,
                                    case when a.threaten_level='1' then '低 危'
                                        when a.threaten_level='2' then '中 危'
                                        when a.threaten_level='3' then '高 危'
                                        when a.threaten_level='4' then '严 重' end as threaten_level,
                                    case when a.handle_status='fix' then '已修复' else '未修复' end as handle_status,
                                    coalesce(b.analysis_count, 0) as analyze_count,
                                    case when c.analysis_id is null then '-'
                                        else c.is_risk_event end as conclusion,
                                    case when c.analysis_id is null then '1'
                                        else c.analysis_state end as analyze_state
                                from (
                                    select a.event_id, a.event_name, a.start_time,       -- 攻击事件
                                        c.victim_unit, d.info_sys_name, a.threaten_level, a.handle_status
                                    from h_attack_eventinfo a
                                    left join h_event_victimperson b
                                    on a.event_id=b.event_id
                                    left join h_victim_info c
                                    on c.assert_id=b.assert_id and b.sys_id = c.sys_id
                                    left join h_information_system d
                                    on c.sys_id=d.info_sys_id
                                    group by a.event_id, a.event_name, a.start_time,
                                        c.victim_unit, d.info_sys_name, a.threaten_level, a.handle_status
                                )a
                                left join (
                                    select max(a.analysis_id) as analysis_id,    -- 分析
                                        count(distinct case when a.analysis_state='1' then a.analysis_id end) as analysis_count, a.event_id
                                    from h_analysis_info a
                                    group by a.event_id
                                )b
                                on a.event_id=b.event_id
                                left join (
                                    select a.analysis_id,a.analysis_state,   -- 分析结论
                                        case when b.is_risk_event='0' then '未生成风险事项'
                                            when b.is_risk_event='1' then '生成风险事项'
                                            else '-' end as is_risk_event
                                    from h_analysis_info a
                                    left join h_analysis_conclusion b
                                    on b.analysis_id=a.analysis_id
                                ) c
                                on b.analysis_id=c.analysis_id
                            ) a
                            %s
                """ % tb.where(['a.attack_event_name', 'a.victim_unit', 'a.victim_system']))
        data['total'] = g.cursor.fetchone()['total']
        g.cursor.execute("""select a.attack_event_id,
                                a.attack_event_name,a.start_time,
                                a.victim_unit,a.victim_system, a.threaten_level,
                                a.handle_status,a.conclusion,a.analyze_count,a.analyze_state
                            from (
                               select a.event_id as attack_event_id,a.event_name as attack_event_name,d2t(a.start_time) as start_time,
                                    a.victim_unit,a.info_sys_name as victim_system,
                                    case when a.threaten_level='1' then '低 危'
                                        when a.threaten_level='2' then '中 危'
                                        when a.threaten_level='3' then '高 危'
                                        when a.threaten_level='4' then '严 重' end as threaten_level,
                                    case when a.handle_status='fix' then '已修复' else '未修复' end as handle_status,
                                    coalesce(b.analysis_count, 0) as analyze_count,
                                    case when c.analysis_id is null then '-'
                                        else c.is_risk_event end as conclusion,
                                    case when c.analysis_id is null then '1'
                                        else c.analysis_state end as analyze_state
                                from (
                                    select a.event_id, a.event_name, a.start_time,       -- 攻击事件
                                        c.victim_unit, d.info_sys_name, a.threaten_level, a.handle_status
                                    from h_attack_eventinfo a
                                    left join h_event_victimperson b
                                    on a.event_id=b.event_id
                                    left join h_victim_info c
                                    on c.assert_id=b.assert_id and b.sys_id = c.sys_id
                                    left join h_information_system d
                                    on c.sys_id=d.info_sys_id
                                    group by a.event_id, a.event_name, a.start_time,
                                        c.victim_unit, d.info_sys_name, a.threaten_level, a.handle_status
                                )a
                                left join (
                                    select max(a.analysis_id) as analysis_id,    -- 分析
                                        count(distinct case when a.analysis_state='1' then a.analysis_id end) as analysis_count, a.event_id
                                    from h_analysis_info a
                                    group by a.event_id
                                )b
                                on a.event_id=b.event_id
                                left join (
                                    select a.analysis_id,a.analysis_state,    -- 分析结论
                                        case when b.is_risk_event='0' then '未生成风险事项'
                                            when b.is_risk_event='1' then '生成风险事项'
                                            else '-' end as is_risk_event
                                    from h_analysis_info a
                                    left join h_analysis_conclusion b
                                    on b.analysis_id=a.analysis_id
                                ) c
                                on b.analysis_id=c.analysis_id
                            )a
                            %s %s %s %s
                        """ % (tb.where(['a.attack_event_name', 'a.victim_unit', 'a.victim_system']), tb.orderBy(), tb.offset(), tb.limit()))
        data['dataSource'] = g.cursor.fetchall()
        return json.dumps({'status': 'success', 'data': data})
    except Exception as e:
        return json.dumps({"status": "fail", "msg": "查询失败", "e": str(e)})


# Automatic analysis -- rule configuration
@app.route('/api/analyze/autoAnalyze/getRuleConfig', methods=['GET'])
@auth.permission("analyze", "readOnly")
def autoAnalyze_getRuleConfig():
    """Return the active (state='1') auto-analysis rules.

    Only ``dataSource.attack`` is populated from h_auto_analysis_rule;
    ``dataSource.fragile`` is always returned as an empty list (the
    front-end apparently expects the key to exist).
    """
    try:
        data = {
            'dataSource': {
                'fragile': [],
                'attack': []
            }
        }
        g.cursor.execute("""select rule_id as id, rule_type as rtype, rule_item as name,
                                   time_span_value as tvalue, time_span_unit as tunit, false as tErrorHint,
                                   risk_level_condition as roperator, concat(risk_level,'级') as rlevel,
                                   repeated_condition as foperator, repeated_times as fvalue, false as fErrorHint,
                                   key_word_type as keywordtype, key_word as keyword, key_word_type as keytype,
                                   false as change
                            from h_auto_analysis_rule
                            where state='1'
                            order by rule_type, rule_item asc
                        """)
        data['dataSource']['attack'] = g.cursor.fetchall()
        return json.dumps({'status': 'success', 'data': data})
    except Exception as e:
        # Include the error text, matching the other handlers in this module
        # (previously the exception was silently discarded).
        return json.dumps({"status": "fail", "msg": "查询失败", "e": str(e)})


# Automatic analysis -- add a custom rule
@app.route('/api/analyze/autoAnalyze/addCustomRule', methods=['POST'])
@auth.permission("analyze", "readOnly")  # NOTE(review): write endpoint guarded by a "readOnly" permission — confirm intended
def analyze_autoAnalyze_addCustomRule(_currUser):
    """Insert a user-defined keyword rule into h_auto_analysis_rule.

    Validates ``name``, ``keyword`` and ``rsource`` from the request,
    then inserts a rule with rule_type='1' (custom), key_word_type='5'
    and state='1', stamped with the current user's person_id.
    """
    try:
        form = ValidateForm(
            name=['规则名称', V.required()],
            keyword=['关键字', V.required()],
            rsource=['规则来源', V.required()]
        )
        (flag, data) = form.validate()
        if not flag:
            data['status'] = 'fail'
            return json.dumps(data)

        data['person_id'] = _currUser['user']['person_id']
        g.cursor.execute("""insert into h_auto_analysis_rule(
                                rule_type, rule_item, key_word_type, key_word,
                                r_person, state, r_time)
                            values('1', %(name)s, '5', %(keyword)s,
                                   %(person_id)s, '1', now())""", data)
        g.conn.commit()
        return json.dumps({'status': 'success', 'data': data})
    except Exception as e:
        # Include the error text, matching the other handlers in this module.
        return json.dumps({"status": "fail", "msg": "查询失败", "e": str(e)})


# Automatic analysis -- save rule configuration
@app.route('/api/analyze/autoAnalyze/ruleSave', methods=['POST'])
@auth.permission("analyze", "readOnly")  # NOTE(review): write endpoint guarded by a "readOnly" permission — confirm intended
def analyze_autoAnalyze_ruleSave(_currUser):
    """Persist edited built-in (rtype='0') attack rules.

    Rules are versioned rather than updated in place: for each rule the
    client flagged as changed, the current row is deactivated (state=0)
    and a fresh row with the new values is inserted as state='1'.
    """
    try:
        td = table()
        ruleConfigs = td.dict['ruleConfigs']
        for rule in ruleConfigs['attack']:
            if rule['rtype'] == '0' and rule['change']:
                # The UI sends e.g. "3级"; strip the unit before storing.
                rule['rlevel'] = rule['rlevel'].replace('级', '')
                g.cursor.execute("""update h_auto_analysis_rule
                                        set state=0
                                    where rule_id=%(id)s
                                """, rule)
                rule['person_id'] = _currUser['user']['person_id']
                # BUGFIX: the VALUES list previously read "'%(keytype)s', 1',"
                # — a quoted placeholder (which double-quotes the bound value)
                # plus a stray quote after 1 that left an unterminated string
                # literal, so the INSERT could never parse. keytype is now a
                # proper bound parameter and state is the literal '1'.
                g.cursor.execute("""insert into h_auto_analysis_rule(
                                        rule_item, rule_type,
                                        time_span_value, time_span_unit,
                                        risk_level_condition, risk_level,
                                        repeated_condition, repeated_times,
                                        r_person, key_word_type, state, r_time)
                                    values (
                                        %(name)s, '0', %(tvalue)s, %(tunit)s,
                                        %(roperator)s, %(rlevel)s,
                                        %(foperator)s, %(fvalue)s,
                                        %(person_id)s, %(keytype)s, '1', now()
                                    )
                                """, rule)
        g.conn.commit()
        return json.dumps({'status': 'success'})
    except Exception as e:
        return json.dumps({"status": "fail", "msg": "查询失败", 'e': str(e)})


# Automatic analysis -- trigger an automatic analysis run
@app.route('/api/analyze/autoAnalyze/teigeerAnalyze', methods=['GET'])
@auth.permission("analyze", "readOnly")
def analyze_autoAnalyze_teigeerAnalyze(_currUser):
    """Invoke the auto_analysis() stored procedure for the current user."""
    try:
        params = {'person_id': _currUser['user']['person_id']}
        g.cursor.execute("""select auto_analysis(%(person_id)s)""", params)
        g.conn.commit()
        return json.dumps({'status': 'success'})
    except Exception as e:
        return json.dumps({"status": "fail", "msg": "查询失败", 'e': str(e)})


# Automatic analysis -- delete (deactivate) a rule
@app.route('/api/analyze/autoAnalyze/delRule', methods=['POST'])
@auth.permission("analyze", "readOnly")
def analyze_autoAnalyze_delRule(_currUser):
    """Soft-delete an auto-analysis rule by flipping its state to 0."""
    try:
        form = ValidateForm(ruleId=['规则编号', V.optional()])
        flag, data = form.validate()
        if not flag:
            data['status'] = 'fail'
            return json.dumps(data)

        # Rules are never physically removed; history stays queryable.
        g.cursor.execute("""update h_auto_analysis_rule
                                set state=0
                            where rule_id=%(ruleId)s
                        """, data)
        g.conn.commit()
        return json.dumps({'status': 'success'})
    except Exception as e:
        return json.dumps({"status": "fail", "msg": "查询失败", 'e': str(e)})


# 深度分析--获取事件信息&影响分析（over）
@app.route('/api/analyze/depthAnalyze/getDepthAnalysisEventInfo', methods=['POST'])
@auth.permission("analyze", "readOnly")
def analyze_depthAnalyze_getDepthAnalysisEventInfo(_currUser):
    try:
        form = ValidateForm(
            analyzeId=['分析编号', V.optional()],
            attackEventId=['攻击事件编号', V.optional()],
            analysisSource=['分析来源', V.optional()]
        )
        (flag, data) = form.validate()
        if not flag:
            data['status'] = 'fail'
            return json.dumps(data)

        data['person_id'] = _currUser['user']['person_id']
        dataInfo = {
            "dataSource": {
                "attack_event_id": 1,
                "analyze_id": None,
                "event_name": "xx事件",
                "threaten_level": "轻微",
                "attack_num": 12,
                "start_time": "2019-02-22 15:34:35",
                "new_time": "2019-02-23 14:01:11",
                "describe": "描述xxxxxxxx",
                # "solution": "解决方案xxxxxxxxxx",
                "elements": "要素1,要素2,要素3,要素4,要素5,要素6,要素11,要素12,要素13,要素14,要素15,要素16",
                "analyze_elements": "要素1",
                "impactAnalysis": [
                    {
                        "element": "要素1",
                        "affect_unit": 5,
                        "affect_system": 5,
                        "affect_node": 20,
                        "element_source": "原始要素"
                    }
                ],
                "potentialImpacts": [
                    {
                        "element": "要素1",
                        "affect_unit": 5,
                        "affect_system": 5,
                        "affect_node": 20,
                        "element_source": "人工添加要素",
                        "task_id": 1,
                        "task_name": "任务1",
                        "task_schedule": "进行中"
                    },
                    {
                        "element": "要素2",
                        "affect_unit": 5,
                        "affect_system": 5,
                        "affect_node": 20,
                        "element_source": "人工添加要素",
                        "task_id": None,
                        "task_name": '-',
                        "task_schedule": '-'
                    }
                ],
                "investigationTask": [
                    {
                        "task_id": 1,
                        "task_name": "任务1",
                        "r_time": "2019-02-22 15:34:35",
                        "r_person": "某某某",
                        "task_type": "类型1",
                        "task_state": "进行中"
                        # "describe": "描述"
                    }
                ]
            }
        }
        sql = ''
        if data['analyzeId']:
            sql = "b.analysis_id='%s'" % data['analyzeId']
            eventSql = "b.analysis_id='%s'" % data['analyzeId']
        else:
            sql = "b.event_id='%s'" % data['attackEventId']
            eventSql = "a.event_id='%s'" % data['attackEventId']
        # 事件基本信息
        g.cursor.execute("""select a.event_id as attack_event_id,b.analysis_id,a.event_name,a.threaten_level,a.attack_num,
                                   a.start_time,a.new_time,a.event_desc,c.analysis_state
                                from (
                                    select a.event_id, a.event_name, a.attack_num,      -- 攻击事件
                                        case when a.threaten_level='1' then '低危'
                                            when a.threaten_level='2' then '中危'
                                            when a.threaten_level='3' then '高危'
                                            when a.threaten_level='4' then '严重' end as threaten_level,
                                        d2t(a.start_time) as start_time,d2t(a.new_time) as new_time,a.event_desc
                                    from h_attack_eventinfo a
                                )a
                                left join (
                                    select max(a.analysis_id) as analysis_id,    -- 分析
                                        count(distinct case when a.analysis_state='1' then a.analysis_id end) as analysis_count, a.event_id
                                    from h_analysis_info a
                                    group by a.event_id
                                )b
                                on a.event_id=b.event_id
                                left join (
                                    select a.analysis_id,a.analysis_state    -- 当前分析
                                    from h_analysis_info a
                                ) c
                                on b.analysis_id=c.analysis_id
                                where %s
                        """ % eventSql)
        dataInfo['dataSource'] = g.cursor.fetchone()
        if (dataInfo['dataSource']['analysis_state'] == '1') or (dataInfo['dataSource']['analysis_state'] is None):
            # 分析已完成或者没有分析
            # 新建分析插入事件分析信息表 返回分析编号
            g.cursor.execute("""insert into h_analysis_info(
                                    event_id, analysis_title, analysis_source, analysis_state,
                                    analysis_person, analysis_start_time)
                                select a.event_id,a.event_name||'深度分析' as analysis_title,
                                    %(analysisSource)s as analysis_source, '0' as analysis_state,
                                    %(person_id)s as analysis_person, now() as analysis_start_time
                                from h_attack_eventinfo a
                                where a.event_id=%(attackEventId)s
                                returning analysis_id
                            """, data)
            g.conn.commit()
            data['analyzeId'] = g.cursor.fetchone()['analysis_id']
            dataInfo['dataSource']['analysis_id'] = data['analyzeId']
            # 插入分析要素信息表
            g.cursor.execute("""insert into h_analysis_element_info(
                                    analysis_id, element_name, element_source, element_source_step, element_type,
                                    state, r_person, r_time)
                                select %(analyzeId)s as analysis_id, a.sign_value as element_name, '1' as element_source,
                                    '0' as element_source_step, a.sign_type as element_type,
                                    '1' as state, %(person_id)s as r_person, now() as r_time
                                from h_attack_beacon a
                                where a.event_id=%(attackEventId)s
                                returning element_id, element_name, element_type
                            """, data)
            g.conn.commit()
            element_names = g.cursor.fetchall()
            values = ''
            # 从es中搜索日志插入分析要素日志明细表 -- h_element_detail
            for element in element_names:
                # ---------------------
                data['es_data'] = {
                    "query": {
                        "term": {
                            element['element_type']: element['element_name']
                        }
                    },
                    "sort": {
                        "ts": {
                            "order": "desc"
                        }
                    }
                }
                esData = g.es.search(index='AIR', doc_type=getEsType()['en_type_name'], body=data['es_data'])
                if esData['hits']['total'] > 0:
                    values = values + ", (%s, '%s', '%s')" % (element['element_id'], element['element_name'], json.dumps(makeLog(esData['hits']['hits'])))
            if values:
                # 插入es数据存储表
                g.cursor.execute("""insert into h_analytical_es(
                                        element_id, element_name, json_data)
                                    values %s
                                """ % (values[1:]))
                g.conn.commit()
            # ---------------------
            # # 插入分析要素影响信息表 -- h_element_effect_info
            # g.cursor.execute("""insert into h_element_effect_info(
            #                         element_id, r_time, sys_node_num, info_sys_num, agency_num,
            #                         effect_sys_node, effect_info_sys, effect_agency)
            #                     select el.element_id, now() as r_time,
            #                         count(distinct c.sys_node_id) as sys_node_num,
            #                         count(distinct c.info_sys_id) as info_sys_num,
            #                         count(distinct c.agency_id) as agency_num,
            #                         string_agg(c.sys_node_id, ',') as effect_sys_node,
            #                         string_agg(c.info_sys_id, ',') as effect_info_sys,
            #                         string_agg(c.agency_id, ',') as effect_agency
            #                     from (
            #                         select element_id
            #                         from h_analysis_element_info
            #                         where analysis_id=%(analyzeId)s
            #                     ) el
            #                     left join h_element_detail a
            #                     on el.element_id = a.element_id
            #                     left join h_information_system b
            #                     on a.sys_id=b.info_sys_id
            #                     left join h_system_node c
            #                     on c.info_sys_id=b.info_sys_id
            #                     group by el.element_id
            #                 """, data)
            # g.conn.commit()
            # 插入分析相关信息表 -- h_related_analysis_info
            g.cursor.execute("""insert into h_related_analysis_info(
                                    analysis_id, related_type, related_title, related_id, related_title_type,
                                    related_element_name, r_person, r_time)
                                select a.analysis_id, '4' as related_type, c.event_name as related_title,b.event_id as related_id,'1' as related_title_type,
                                    a.element_name as related_element_name, %(person_id)s as r_person, now() as r_time
                                from h_analysis_info d
                                left join h_analysis_element_info a
                                on d.analysis_id=a.analysis_id
                                left join h_attack_beacon b
                                on a.element_name=b.sign_value
                                left join h_attack_eventinfo c
                                on b.event_id=c.event_id
                                where c.event_id!=d.event_id and b.event_id is not null and a.analysis_id=%(analyzeId)s
                                union all
                                select a.analysis_id, '3' as related_type, c.risk_event_name as related_title, b.risk_event_id::varchar as related_id,'2' as related_title_type,
                                    a.element_name as related_element_name, %(person_id)s as r_person, now() as r_time
                                from h_analysis_info d
                                left join h_analysis_element_info a
                                on d.analysis_id=a.analysis_id
                                left join h_event_element_info b
                                on a.element_name=b.element_name
                                left join h_risk_event_info c
                                on b.risk_event_id=c.risk_event_id
                                where b.risk_event_id is not null and a.analysis_id=%(analyzeId)s and c.analysis_id not in (
                                    select a.analysis_id
                                    from h_analysis_info a
                                    where a.event_id = %(attackEventId)s
                                )
                            """, data)
            g.conn.commit()
        # ------------------查找影响与潜在影响------------------------
        # 影响分析
        g.cursor.execute("""select a.element_id,a.element_type,a.element_name as element,a.agency_num as affect_unit,
                                a.info_sys_num as affect_system,a.sys_node_num as affect_node,
                                a.element_source as element_source,
                                case when c.log_num is null then 0 else c.log_num end as log_num, c.log_ids,
                                coalesce(json_agg(d) filter (where d.element_id is not null), '[]') AS tasks
                            from (
                                select a.analysis_id,b.element_id, b.element_name,
                                    coalesce(c.agency_num, 0) as agency_num,
                                    coalesce(c.info_sys_num, 0) as info_sys_num,
                                    coalesce(c.sys_node_num, 0) as sys_node_num,
                                    b.element_source,    -- 分析要素
                                    case when b.element_type is null then '' else b.element_type end as element_type
                                from h_analysis_info a
                                left join h_analysis_element_info b
                                on b.analysis_id=a.analysis_id
                                left join h_element_effect_info c
                                on b.element_id=c.element_id
                                where b.state='1' and b.element_source_step='0'
                            ) a
                            left join (
                                select max(a.analysis_id) as analysis_id,    -- 分析
                                    count(distinct case when a.analysis_state='1' then a.analysis_id end) as analysis_count, a.event_id
                                from h_analysis_info a
                                group by a.event_id
                            ) b
                            on a.analysis_id=b.analysis_id
                            left join (             --日志
                                select a.element_id, count(distinct a.log_id) log_num, string_agg(a.log_id, ',') as log_ids
                                from h_element_detail a
                                group by a.element_id
                            ) c
                            on a.element_id=c.element_id
                            left join (              --任务
                                select a.element_id, a.task_id, a.task_name,
                                    case when a.task_state='1' then '已下发'
                                        when a.task_state='2' then '已完成'
                                        when a.task_state='3' then '超时未反馈' end as task_state,
                                    b.successback||'/'||b.allback as feedback
                                from h_survey_task_info a
                                LEFT JOIN (
                                    select a.task_id,
                                        count(distinct case when a.feedback_state='0' then a.r_id end) as noback,
                                        count(distinct case when a.feedback_state='1' then a.r_id end) as successback,
                                        count(distinct a.r_id) as allback
                                    from h_survey_task_aim a
                                    group by a.task_id
                                ) b
                                on a.task_id=b.task_id
                            ) d
                            on a.element_id=d.element_id
                            where %s
                            group by a.element_id,a.element_type,a.element_name,a.agency_num,
                                a.info_sys_num,a.sys_node_num,
                                a.element_source,c.log_num,c.log_ids
                        """ % sql)
        dataInfo['dataSource']['impactAnalysis'] = g.cursor.fetchall()
        # # 影响分析任务列表
        # for item in dataInfo['dataSource']['impactAnalysis']:
        #     g.cursor.execute("""select a.task_id, a.task_name,
        #                             case when a.task_state='1' then '已下发'
        #                                 when a.task_state='2' then '已完成'
        #                                 when a.task_state='3' then '超时未反馈' end as task_state
        #                         from h_survey_task_info a
        #                         where a.element_id = %(element_id)s
        #                     """, item)
        #     item['tasks'] = g.cursor.fetchall()
        # 潜在影响分析
        g.cursor.execute("""select a.element_id,a.element_type,a.element_name as element,a.agency_num as affect_unit,
                                a.info_sys_num as affect_system,a.sys_node_num as affect_node,
                                a.element_source as element_source,
                                case when c.log_num is null then 0 else c.log_num end as log_num, c.log_ids,
                                coalesce(json_agg(d) filter (where d.element_id is not null), '[]') AS tasks
                            from (
                                select a.analysis_id,b.element_id, b.element_name,
                                    coalesce(c.agency_num, 0) as agency_num,
                                    coalesce(c.info_sys_num, 0) as info_sys_num,
                                    coalesce(c.sys_node_num, 0) as sys_node_num,
                                    b.element_source,    -- 分析要素
                                    case when b.element_type is null then '' else b.element_type end as element_type
                                from h_analysis_info a
                                left join h_analysis_element_info b
                                on b.analysis_id=a.analysis_id
                                left join h_element_effect_info c
                                on b.element_id=c.element_id
                                where b.state='1' and b.element_source_step='1'
                            ) a
                            left join (
                                select max(a.analysis_id) as analysis_id,    -- 分析
                                    count(distinct case when a.analysis_state='1' then a.analysis_id end) as analysis_count, a.event_id
                                from h_analysis_info a
                                group by a.event_id
                            ) b
                            on a.analysis_id=b.analysis_id
                            left join (             --日志
                                select a.element_id, count(distinct a.log_id) log_num, string_agg(a.log_id, ',') as log_ids
                                from h_element_detail a
                                group by a.element_id
                            ) c
                            on a.element_id=c.element_id
                            left join (              --任务
                                select a.element_id, a.task_id, a.task_name,
                                    case when a.task_state='1' then '已下发'
                                        when a.task_state='2' then '已完成'
                                        when a.task_state='3' then '超时未反馈' end as task_state,
                                    b.successback||'/'||b.allback as feedback
                                from h_survey_task_info a
                                LEFT JOIN (
                                    select a.task_id,
                                        count(distinct case when a.feedback_state='0' then a.r_id end) as noback,
                                        count(distinct case when a.feedback_state='1' then a.r_id end) as successback,
                                        count(distinct a.r_id) as allback
                                    from h_survey_task_aim a
                                    group by a.task_id
                                ) b
                                on a.task_id=b.task_id
                            ) d
                            on a.element_id=d.element_id
                            where %s
                            group by a.element_id,a.element_type,a.element_name,a.agency_num,
                                a.info_sys_num,a.sys_node_num,
                                a.element_source,c.log_num,c.log_ids
                        """ % sql)
        dataInfo['dataSource']['potentialImpacts'] = g.cursor.fetchall()
        # # 潜在影响分析任务列表
        # for item in dataInfo['dataSource']['potentialImpacts']:
        #     g.cursor.execute("""select a.task_id, a.task_name,
        #                             case when a.task_state='1' then '已下发'
        #                                 when a.task_state='2' then '已完成'
        #                                 when a.task_state='3' then '超时未反馈' end as task_state
        #                         from h_survey_task_info a
        #                         where a.element_id = %(element_id)s
        #                     """, item)
        #     item['tasks'] = g.cursor.fetchall()
        return json.dumps({'status': 'success', 'data': dataInfo})
    except Exception, e:
        return json.dumps({"status": "fail", "msg": "查询失败", 'e': str(e)})


# 深度分析--传入选中信标获取相关信息
@app.route('/api/analyze/depthAnalyze/getRelatedInfo', methods=['POST'])
@auth.permission("analyze", "readOnly")
def analyze_depthAnalyze_getRelatedInfo():
    try:
        form = ValidateForm(
            analyzeId=['分析编号', V.required()]
        )
        (flag, data) = form.validate()
        if not flag:
            data['status'] = 'fail'
            return json.dumps(data)

        dataInfo = {
            'dataSource': {
                'relatedAttacks': [],  # 相关攻击事件
                'relatedAttacksResult': [],  # 相关攻击事件-信标
                'relatedRisks': [],  # 相关风险事项
                'relatedRisksResult': [],  # 相关风险事项-信标
                'relatedInformation': [],  # 相关情报
                'relatedInformationResult': []  # 相关情报-信标
            }
        }
        # 相关攻击事件
        g.cursor.execute("""select a.analysis_id,string_agg(a.id::varchar,',') as ids, b.event_id,b.event_name,
                                case when b.threaten_level='1' then '低危'
                                    when b.threaten_level='2' then '中危'
                                    when b.threaten_level='3' then '高危'
                                    when b.threaten_level='4' then '严重' end as threaten_level,
                                b.attack_num,b.event_desc as describe,
                                string_agg(distinct a.related_element_name, '<br>') as associated_elements
                            from h_related_analysis_info a
                            left join h_attack_eventinfo b
                            on a.related_id=b.event_id
                            where a.related_title_type='1' and a.analysis_id=%(analyzeId)s
                            group by a.analysis_id,b.event_id,b.event_name,b.threaten_level,b.attack_num,b.event_desc
                        """, data)
        dataInfo['dataSource']['relatedAttacks'] = g.cursor.fetchall()
        g.cursor.execute("""select a.analysis_id,string_agg(a.id::varchar,',') as ids,
                                a.related_element_name as associated_elements
                            from h_related_analysis_info a
                            where a.related_title_type='1' and a.analysis_id=%(analyzeId)s
                            group by a.analysis_id,a.related_element_name
                        """, data)
        dataInfo['dataSource']['relatedAttacksResult'] = g.cursor.fetchall()
        # 相关风险事项
        g.cursor.execute("""select a.analysis_id, string_agg(a.id::varchar,',') as ids,b.risk_event_id,b.risk_event_name as event_name,
                                case when b.risk_event_level='1' then '一般'
                                    when b.risk_event_level='2' then '较大'
                                    when b.risk_event_level='3' then '重大'
                                    when b.risk_event_level='4' then '特大' end as risk_level,
                                b.risk_event_type as event_type,
                                string_agg(distinct a.related_element_name, '<br>') as associated_elements
                            from h_related_analysis_info a
                            left join h_risk_event_info b
                            on a.related_id::int=b.risk_event_id
                            where a.related_title_type='2' and a.analysis_id=%(analyzeId)s
                            group by a.analysis_id,b.risk_event_id,b.risk_event_name,b.risk_event_level,b.risk_event_type
                        """, data)
        dataInfo['dataSource']['relatedRisks'] = g.cursor.fetchall()
        g.cursor.execute("""select a.analysis_id,string_agg(a.id::varchar,',') as ids,
                                a.related_element_name as associated_elements
                            from h_related_analysis_info a
                            where a.related_title_type='2' and a.analysis_id=%(analyzeId)s
                            group by a.analysis_id,a.related_element_name
                        """, data)
        dataInfo['dataSource']['relatedRisksResult'] = g.cursor.fetchall()
        return json.dumps({'status': 'success', 'data': dataInfo})
    except Exception, e:
        return json.dumps({"status": "fail", "msg": "查询失败", 'e': str(e)})


# 深度分析--保存
@app.route('/api/analyze/depthAnalyze/postSaveAnalysis', methods=['POST'])
@auth.permission("analyze", "readOnly")
def analyze_depthAnalyze_postSaveAnalysis(_currUser):
    try:
        form = ValidateForm(
            analyzeId=['分析编号', V.required()],
            analysisConclusion=['保存数据集合', V.required()]
        )
        (flag, data) = form.validate()
        if not flag:
            data['status'] = 'fail'
            return json.dumps(data)

        data['person_id'] = _currUser['user']['person_id']
        data['analysisConclusion'] = eval(data['analysisConclusion'])
        data['analysisConclusion']['person_id'] = data['person_id']
        data['analysisConclusion']['analyzeId'] = data['analyzeId']
        data['risk_event_id'] = 0
        data['risk_event_name'] = data['analysisConclusion']['eventName']
        if data['analysisConclusion']['checked'] == 1:
            if data['analysisConclusion']['basis']:
                data['analysisConclusion']['basis'] = "%s/%s" % (config.MAN_ANALYSIS_SAVE, data['analysisConclusion']['basis'])
        else:
            data['analysisConclusion']['basis'] = ''
        # 插入分析结论信息表
        g.cursor.execute("""insert into h_analysis_conclusion(
                                    analysis_id, is_risk_event, analysis_basis,risk_event_name,
                                    risk_event_type, risk_event_level, risk_event_content, solution,
                                    remark, r_person, r_time)
                            values (%(analyzeId)s, %(checked)s, %(basis)s, %(eventName)s,
                                    %(eventType)s, %(eventLevel)s, %(describe)s, %(solution)s,
                                    %(remark)s, %(person_id)s, now())
                            returning analysis_id
                        """, data['analysisConclusion'])
        g.conn.commit()
        # 更新事件分析信息表分析状态
        g.cursor.execute("""update h_analysis_info
                            set analysis_state='1'
                            where analysis_id=%(analyzeId)s
                        """, data)
        g.conn.commit()
        if data['analysisConclusion']['checked'] == 1:
            # 生成风险事项
            # 插入风险事项
            g.cursor.execute("""insert into h_risk_event_info(
                                        analysis_id, risk_event_name, risk_event_level,
                                        risk_event_type, risk_event_content, solution, annex_address,
                                        remark, state, early_warn, exist_update, submit_person, submit_time,
                                        r_person, r_time, analysis_basis, is_commit)
                                select a.analysis_id, risk_event_name, risk_event_level,
                                    risk_event_type, risk_event_content, solution, analysis_basis as annex_address,
                                    remark, '1' as state, '0' as early_warn, '0' as exist_update, %(person_id)s as submit_person, now() as submit_time,
                                    %(person_id)s as r_person, now() as r_time, analysis_basis as analysis_basis, '0' as is_commit
                                from h_analysis_conclusion a
                                where a.analysis_id=%(analyzeId)s
                                returning risk_event_id
                            """, data)
            g.conn.commit()
            data['risk_event_id'] = g.cursor.fetchone()['risk_event_id']
            # 插入事项相关信息表
            g.cursor.execute("""insert into h_event_related_analysis_info(
                                    risk_event_id, related_type, related_title,
                                    related_id, related_title_type,
                                    other_links, related_element_name)
                                select %(risk_event_id)s as risk_event_id, a.related_type, a.related_title,
                                    related_id, related_title_type,
                                    other_links, related_element_name
                                from h_related_analysis_info a
                                where a.analysis_id=%(analyzeId)s
                            """, data)
            g.conn.commit()
            # 插入事项要素信息表
            g.cursor.execute("""insert into h_event_element_info(
                                        risk_event_id, element_name, element_type, element_source)
                                select %(risk_event_id)s as risk_event_id, a.element_name, a.element_type, a.element_source
                                from h_analysis_element_info a
                                where a.analysis_id=%(analyzeId)s
                                returning element_id
                            """, data)
            g.conn.commit()
            # element_ids = g.cursor.fetchall()
            # 插入事项要素影响信息表
            g.cursor.execute("""insert into h_event_effect_info(
                                    element_id, sys_node_num, info_sys_num, agency_num, r_time,
                                    effect_sys_node, effect_info_sys, effect_agency)
                                select c.element_id, b.sys_node_num, b.info_sys_num, b.agency_num, now() as r_time,
                                    effect_sys_node, effect_info_sys, effect_agency
                                from h_analysis_element_info a
                                left join h_element_effect_info b
                                on a.element_id=b.element_id
                                left join h_event_element_info c
                                on a.element_name=c.element_name
                                where a.analysis_id=%(analyzeId)s and c.risk_event_id=%(risk_event_id)s
                            """, data)
            # 插入事项要素日志明细表
            g.cursor.execute("""insert into h_event_element_detail(
                                    element_id, log_id, log_type, ts, sys_id, sys_name, log_level,
                                    unit_id, raw_msg, sip, sport, sip_city, sip_country, dip, dport,
                                    dip_city, dip_country, proto, response_action, url, tag, event_desc,
                                    md5s, behavior_name_level, malname, virus_type, virus_family,
                                    virus_behavior, control_content, dev_name, dev_id, action, cve_id,
                                    vul_name, vul_type, client_ip, operate_user, result, action_state,
                                    flow_file_type, flow_data_detail, final_result, is_malicious,
                                    file_type, network_monitor, disposal_state, r_time)
                                select c.element_id,log_id, log_type, ts, sys_id, sys_name, log_level,
                                    unit_id, raw_msg, sip, sport, sip_city, sip_country, dip, dport,
                                    dip_city, dip_country, proto, response_action, url, tag, event_desc,
                                    md5s, behavior_name_level, malname, virus_type, virus_family,
                                    virus_behavior, control_content, dev_name, dev_id, action, cve_id,
                                    vul_name, vul_type, client_ip, operate_user, result, action_state,
                                    flow_file_type, flow_data_detail, final_result, is_malicious,
                                    file_type, network_monitor, disposal_state, now() as r_time
                                from h_analysis_element_info a
                                left join h_element_detail b
                                on a.element_id=b.element_id
                                left join h_event_element_info c
                                on a.element_name=c.element_name
                                where b.log_id is not null and a.analysis_id=%(analyzeId)s and c.risk_event_id=%(risk_event_id)s
                            """, data)
            g.conn.commit()
        # 插入操作流水表
        g.cursor.execute("""insert into h_analysis_event_record(
                                source, event_id,
                                analysis_source, analysis_id, analysis_state, analysis_result,
                                risk_event_id, risk_event_name, operate, r_person, r_time)
                            select '1' as source, a.event_id as event_id,
                                a.analysis_source as analysis_source, a.analysis_id, a.analysis_state, b.is_risk_event as analysis_result,
                                %(risk_event_id)s as risk_event_id, b.risk_event_name, '深度分析' as operate, %(person_id)s as r_person, now() as r_time
                            from h_analysis_info a
                            left join h_analysis_conclusion b
                            on a.analysis_id=b.analysis_id
                            where a.analysis_id=%(analyzeId)s
                        """, data)
        g.conn.commit()
        return json.dumps({'status': 'success', 'data': data})
    except Exception, e:
        return json.dumps({"status": "fail", "msg": "查询失败", 'e': str(e)})


# 深度分析--验证风险事项名称（over）
@app.route('/api/analyze/depthAnalyze/validataEventname', methods=['POST'])
@auth.permission("analyze", "readOnly")
def analyze_depthAnalyze_validataEventname():
    try:
        form = ValidateForm(
            riskeventname=['风险事项名称', V.required()]
        )
        (flag, data) = form.validate()
        if not flag:
            data['status'] = 'fail'
            return json.dumps(data)

        g.cursor.execute("""select count(a.risk_event_id) as total
                            from h_risk_event_info a
                            where a.state='1' and a.risk_event_name=%(riskeventname)s
                        """, data)
        data['total'] = g.cursor.fetchone()['total']
        return json.dumps({'status': 'success', 'data': data})
    except Exception, e:
        return json.dumps({"status": "fail", "msg": "查询失败", 'e': str(e)})


# 深度分析--添加要素-获取日志类型（over）
@app.route('/api/analyze/depthAnalyze/getLogTypes', methods=['GET'])
@auth.permission("analyze", "readOnly")
def analyze_depthAnalyze_getLogTypes():
    try:
        data = {}
        g.cursor.execute("""select en_type_name as value, type_name as text
                            from h_log_type
                            where log_type not in ('visible', 'audit')
                        """)
        data['types'] = g.cursor.fetchall()
        data['levels'] = [
            {
                'text': '低危',
                'value': 1
            }, {
                'text': '中危',
                'value': 2
            }, {
                'text': '高危',
                'value': 3
            }, {
                'text': '严重',
                'value': 4
            }]
        data['belongArea'] = [
            {
                'text': '西北',
                'value': '西北'
            }, {
                'text': '中南',
                'value': '中南'
            }, {
                'text': '华东',
                'value': '华东'
            }, {
                'text': '西南',
                'value': '西南'
            }, {
                'text': '东北',
                'value': '东北'
            }, {
                'text': '新疆',
                'value': '新疆'
            }, {
                'text': '华北',
                'value': '华北'
            }]
        return json.dumps({'status': 'success', 'data': data})
    except Exception, e:
        return json.dumps({"status": "fail", "msg": "查询失败", 'e': str(e)})


# 深度分析--添加要素-查找日志信息（over）
@app.route('/api/analyze/depthAnalyze/getPotentialAddLog', methods=['POST'])
@auth.permission("analyze", "readOnly")
def analyze_depthAnalyze_getPotentialAddLog(_currUser):
    try:
        form = ValidateForm(
            elementType=['要素类型', V.optional()],
            element=['要素名称', V.required()],
            esData=['esData', V.required()]
        )
        (flag, data) = form.validate()
        if not flag:
            data['status'] = 'fail'
            return json.dumps(data)

        info = {}
        data['element'] = data['element'].lstrip().rstrip()
        data['person_id'] = _currUser['user']['person_id']
        # 搜索次要素名称的所有告警日志
        info['esData'] = g.es.search(index='AIR', doc_type=getEsType()['en_type_name'], body=json.loads(data['esData']))
        return json.dumps({'status': 'success', 'data': info})
    except Exception, e:
        return json.dumps({"status": "fail", "msg": "查询失败", 'e': str(e)})


# 深度分析--编辑要素-查找选中日志信息（no）
@app.route('/api/analyze/depthAnalyze/getCheckedLog', methods=['POST'])
@auth.permission("analyze", "readOnly")
def analyze_depthAnalyze_getCheckedLog():
    try:
        data = {
            "total": 0,
            "dataSource": []
        }
        tb = table()

        if tb.dict['elementStep'] == 'impact':
            tb.dict['elementStep'] = '0'
        elif tb.dict['elementStep'] == 'potential':
            tb.dict['elementStep'] = '1'
        g.cursor.execute("""select string_agg(a.log_id, '|') as log_ids
                            from (
                                select a.log_id
                                from h_element_detail a
                                left join h_analysis_element_info b
                                on a.element_id=b.element_id
                                where b.analysis_id=%s and b.element_name='%s' and b.element_source_step='%s'
                            ) a
                        """ % (tb.dict['analyzeId'], tb.dict['element'], tb.dict['elementStep']))
        data['log_ids'] = g.cursor.fetchone()['log_ids']
        g.cursor.execute("""select count(a.log_id) as total
                            from (
                                select a.element_id, a.log_id, a.log_type, d2t(a.ts) as ts, a.sys_id, a.sys_name, a.log_level,
                                    a.unit_id, a.raw_msg, a.sip, a.sport, a.sip_city, a.sip_country, a.dip, a.dport,
                                    a.dip_city, a.dip_country, a.proto, a.response_action, a.url, a.tag, a.event_desc,
                                    a.md5s, a.behavior_name_level, a.malname, a.virus_type, a.virus_family, a.
                                    virus_behavior, a.control_content, a.dev_name, a.dev_id, a.action, a.cve_id,
                                    a.vul_name, a.vul_type, a.client_ip, a.operate_user, a.result, a.action_state,
                                    a.flow_file_type, a.flow_data_detail, a.final_result, a.is_malicious,
                                    a.file_type, a.network_monitor, a.disposal_state
                                from h_element_detail a
                                left join h_analysis_element_info b
                                on a.element_id=b.element_id
                                where b.analysis_id=%s and b.element_name='%s' and b.element_source_step='%s'
                            ) a
                           %s
                        """ % (tb.dict['analyzeId'], tb.dict['element'], tb.dict['elementStep'], tb.where()))
        data['total'] = g.cursor.fetchone()['total']
        g.cursor.execute("""select a.agency_name,a.element_id, a.log_id, a.log_type, a.ts, a.sys_id, a.sys_name, a.log_level,
                                a.unit_id, a.raw_msg, a.sip, a.sport, a.sip_city, a.sip_country, a.dip, a.dport,
                                a.dip_city, a.dip_country, a.proto, a.response_action, a.url, a.tag, a.event_desc,
                                a.md5s, a.behavior_name_level, a.malname, a.virus_type, a.virus_family, a.
                                virus_behavior, a.control_content, a.dev_name, a.dev_id, a.action, a.cve_id,
                                a.vul_name, a.vul_type, a.client_ip, a.operate_user, a.result, a.action_state,
                                a.flow_file_type, a.flow_data_detail, a.final_result, a.is_malicious,
                                a.file_type, a.network_monitor, a.disposal_state
                            from (
                                select case when e.agency_name is null then '-' else e.agency_name end as agency_name,
                                    case when a.sys_name is null then '-' else a.sys_name end as sys_name,
                                    a.element_id, a.log_id, a.log_type, d2t(a.ts) as ts, a.sys_id, a.log_level,
                                    a.unit_id, a.raw_msg, a.sip, a.sport, a.sip_city, a.sip_country, a.dip, a.dport,
                                    a.dip_city, a.dip_country, a.proto, a.response_action, a.url, a.tag, a.event_desc,
                                    a.md5s, a.behavior_name_level, a.malname, a.virus_type, a.virus_family, a.
                                    virus_behavior, a.control_content, a.dev_name, a.dev_id, a.action, a.cve_id,
                                    a.vul_name, a.vul_type, a.client_ip, a.operate_user, a.result, a.action_state,
                                    a.flow_file_type, a.flow_data_detail, a.final_result, a.is_malicious,
                                    a.file_type, a.network_monitor, a.disposal_state
                                from h_element_detail a
                                left join h_analysis_element_info b
                                on a.element_id=b.element_id
                                left join h_system_node c
                                on a.sys_id=c.sys_node_id
                                left join h_information_system d
                                on c.info_sys_id=d.info_sys_id
                                left join sys_agency_info e
                                on d.agency_id=e.agency_id
                                where b.analysis_id=%s and b.element_name='%s' and b.element_source_step='%s'
                            ) a
                           %s %s %s
                        """ % (tb.dict['analyzeId'], tb.dict['element'], tb.dict['elementStep'], tb.where(), tb.offset(), tb.limit()))
        data['dataSource'] = g.cursor.fetchall()
        return json.dumps({'status': 'success', 'data': data})
    except Exception, e:
        return json.dumps({"status": "fail", "msg": "查询失败", 'e': str(e)})


# Depth analysis -- confirm adding an element (over)
@app.route('/api/analyze/depthAnalyze/confirmAddElement', methods=['POST'])
@auth.permission("analyze", "readOnly")
def analyze_depthAnalyze_confirmAddElement(_currUser):
    """Register an analysis element and persist its selected ES log hits.

    Flow:
      1. Validate the POSTed form.
      2. Look the element up in h_analysis_element_info for this analysis;
         insert it (and the related attack-event / risk-event records) when
         it does not exist yet.
      3. Gather the Elasticsearch hits -- everything matching ``esData`` when
         ``checkedAll`` is 'true', otherwise the explicitly selected ``logs``.
      4. Store the parsed hits in h_analytical_es.

    Returns a JSON payload with the element id and the parsed log list.
    """
    try:
        form = ValidateForm(
            analyzeId=['分析编号', V.required()],
            type=['要素类型', V.optional()],
            keyword=['要素名称', V.required()],
            elementStep=['要素来源步骤', V.required()],
            checkedAll=['是否勾选全部', V.required()],
            logs=['选中日志集合', V.optional()],
            checkedElement=['协查任务基于的要素', V.optional()],
            esData=['esData', V.optional()],
        )
        (flag, data) = form.validate()
        if not flag:
            data['status'] = 'fail'
            return json.dumps(data)

        data['keyword'] = data['keyword'].strip()
        data['person_id'] = _currUser['user']['person_id']
        # Map the UI step name to its stored code: '0' = impact, '1' = potential.
        if data['elementStep'] == 'impact':
            data['elementStep'] = '0'
        elif data['elementStep'] == 'potential':
            data['elementStep'] = '1'
        # A joint-investigation task is keyed by the element it is based on;
        # otherwise the keyword itself is the element name.
        data['lookup_name'] = data['checkedElement'] or data['keyword']
        g.cursor.execute("""select element_id
                            from h_analysis_element_info a
                            where a.element_name=%(lookup_name)s and a.analysis_id=%(analyzeId)s
                        """, data)
        analysis_element = g.cursor.fetchone()
        if analysis_element:
            # The element is already registered for this analysis -- reuse it.
            data['element_id'] = analysis_element['element_id']
        else:
            # New element: insert it into the analysis-element table.
            g.cursor.execute("""insert into h_analysis_element_info(
                                    analysis_id, element_name, element_source, element_source_step, element_type,
                                    state, r_person, r_time)
                                values (
                                    %(analyzeId)s, %(keyword)s, '0', %(elementStep)s, %(type)s,
                                    '1', %(person_id)s, now()
                                )
                                returning element_id
                            """, data)
            g.conn.commit()
            data['element_id'] = g.cursor.fetchone()['element_id']
            # Record related analyses: attack events ('4') and risk events ('3')
            # from other analyses that share this element's value.
            g.cursor.execute("""insert into h_related_analysis_info(
                                    analysis_id, related_type, related_title, related_id, related_title_type,
                                    related_element_name, r_person, r_time)
                                select a.analysis_id, '4' as related_type, c.event_name as related_title,b.event_id as related_id,'1' as related_title_type,
                                    a.element_name as related_element_name, %(person_id)s as r_person, now() as r_time
                                from h_analysis_info d
                                left join h_analysis_element_info a
                                on d.analysis_id=a.analysis_id
                                left join h_attack_beacon b
                                on a.element_name=b.sign_value
                                left join h_attack_eventinfo c
                                on b.event_id=c.event_id
                                where c.event_id!=d.event_id and b.event_id is not null and a.analysis_id=%(analyzeId)s and a.element_name=%(keyword)s
                                union all
                                select a.analysis_id, '3' as related_type, c.risk_event_name as related_title, b.risk_event_id::varchar as related_id,'2' as related_title_type,
                                    a.element_name as related_element_name, %(person_id)s as r_person, now() as r_time
                                from h_analysis_info d
                                left join h_analysis_element_info a
                                on d.analysis_id=a.analysis_id
                                left join h_event_element_info b
                                on a.element_name=b.element_name
                                left join h_risk_event_info c
                                on b.risk_event_id=c.risk_event_id
                                where b.risk_event_id is not null and a.analysis_id=%(analyzeId)s and c.analysis_id not in (
                                    select a.analysis_id
                                    from h_analysis_info a
                                    where a.event_id = (select event_id from h_analysis_info where analysis_id=%(analyzeId)s)
                                ) and a.element_name=%(keyword)s
                            """, data)
            g.conn.commit()
        if data['checkedAll'] == 'true':
            # "Select all": fetch every matching hit straight from Elasticsearch.
            esData = g.es.search(index='AIR', doc_type=getEsType()['en_type_name'], body=data['esData'])['hits']['hits']
        else:
            # Only the logs the user ticked in the UI.
            esData = json.loads(data['logs'])
        # Parse the hits once and reuse the JSON for both the insert and the
        # response (the original called makeLog/json.dumps twice).
        parsed_logs = json.dumps(makeLog(esData))
        if esData:
            if data['checkedElement']:
                data['keyword'] = data['checkedElement']
            # Parameterized insert: the element name and JSON payload are
            # user-derived, so interpolating them into the SQL string (as the
            # original did) was an injection risk and broke on quotes.
            g.cursor.execute("""insert into h_analytical_es(
                                    element_id, element_name, json_data)
                                values (
                                    %s, %s, %s
                                )
                            """, (data['element_id'], data['keyword'], parsed_logs))
            g.conn.commit()
        return json.dumps({'status': 'success', 'data': data['element_id'], 'esData': parsed_logs})
    except Exception as e:
        return json.dumps({"status": "fail", "msg": "查询失败", 'e': str(e)})


# Depth analysis -- delete an element (over)
@app.route('/api/analyze/depthAnalyze/delElement', methods=['POST'])
@auth.permission("analyze", "readOnly")
def analyze_depthAnalyze_delElement():
    """Remove an analysis element and every record that references it.

    Deletes the element's log details, detaches it from joint-investigation
    tasks, drops its impact-info row, removes the element record itself and
    any related-analysis rows. Returns the element id row (``None`` when the
    element did not exist).
    """
    try:
        form = ValidateForm(
            analyzeId=['分析编号', V.required()],
            element=['要素名称', V.required()],
            elementStep=['要素来源步骤', V.required()]
        )
        (flag, data) = form.validate()
        if not flag:
            data['status'] = 'fail'
            return json.dumps(data)

        # Map the UI step name to its stored code: '0' = impact, '1' = potential.
        if data['elementStep'] == 'impact':
            data['impact'] = '0'
        elif data['elementStep'] == 'potential':
            data['impact'] = '1'
        # Resolve the element id once instead of repeating the identical
        # subquery in every statement below.
        g.cursor.execute("""select element_id
                            from h_analysis_element_info
                            where analysis_id=%(analyzeId)s and element_name=%(element)s and element_source_step=%(impact)s
                        """, data)
        element_id = g.cursor.fetchone()
        data['element_id'] = element_id['element_id'] if element_id else None
        # Delete the element's log detail rows.
        g.cursor.execute("""delete
                            from h_element_detail
                            where element_id=%(element_id)s
                        """, data)
        # Detach the element from any joint-investigation task that used it.
        g.cursor.execute("""update h_survey_task_info
                            set element_id=null
                            where element_id=%(element_id)s
                        """, data)
        # Delete the element's impact-info row.
        g.cursor.execute("""delete
                            from h_element_effect_info
                            where element_id=%(element_id)s
                        """, data)
        # Delete the element record itself.
        g.cursor.execute("""delete
                            from h_analysis_element_info
                            where analysis_id=%(analyzeId)s and element_name=%(element)s and element_source_step=%(impact)s
                        """, data)
        # Delete the related-analysis rows that referenced this element.
        g.cursor.execute("""delete
                            from h_related_analysis_info
                            where analysis_id=%(analyzeId)s and related_element_name=%(element)s
                        """, data)
        # One commit for the whole clean-up, so a failure midway cannot leave
        # a partially deleted element (the original committed per statement).
        g.conn.commit()
        return json.dumps({'status': 'success', 'data': element_id})
    except Exception as e:
        return json.dumps({"status": "fail", "msg": "查询失败", 'e': str(e)})


# Depth analysis -- delete related info or a log (over)
@app.route('/api/analyze/depthAnalyze/delRelatedOrLog', methods=['POST'])
@auth.permission("analyze", "readOnly")
def analyze_depthAnalyze_delRelatedOrLog():
    """Delete either related-analysis records or one element log detail.

    ``elementId == '0'`` means ``id`` is a comma-separated list of
    h_related_analysis_info record ids; otherwise ``id`` is a single log id
    belonging to the element ``elementId`` in h_element_detail.
    """
    try:
        form = ValidateForm(
            id=['记录编号或日志唯一标识', V.required()],
            elementId=['要素编号', V.required()],
        )
        (flag, data) = form.validate()
        if not flag:
            data['status'] = 'fail'
            return json.dumps(data)

        if data['elementId'] == '0':
            # Delete related-analysis records; 'id' holds comma-separated ids.
            g.cursor.execute("""delete
                                from h_related_analysis_info
                                where id in (select Unnest(string_to_array(%(id)s,','))::int)
                            """, data)
            g.conn.commit()
        else:
            # Delete one log detail row of the given element.
            g.cursor.execute("""delete
                                from h_element_detail
                                where element_id=%(elementId)s and log_id=%(id)s
                            """, data)
            g.conn.commit()
        return json.dumps({'status': 'success'})
    except Exception as e:
        return json.dumps({"status": "fail", "msg": "查询失败", 'e': str(e)})


# Depth analysis -- upload the basis of a manual analysis
@app.route('/api/analysis/depthAnalyze/addfile', methods=['POST'])
@auth.permission('analyze', "readOnly")
def analyze_depthAnalyze_addfile():
    """Save an uploaded manual-analysis evidence file and return its path."""
    try:
        reqFile = request.files.get('file', None)
        if not reqFile:
            return json.dumps({"status": "fail", "msg": "未选择样本"})

        # Create the target directory on first use.
        if not os.path.exists(config.MAN_ANALYSIS_SAVE):
            os.makedirs(config.MAN_ANALYSIS_SAVE)
        # basename() strips any directory components from the client-supplied
        # filename, preventing path traversal (e.g. "../../evil"); the
        # original wrote the raw filename straight into the save path.
        filename = os.path.basename(reqFile.filename)
        path = os.path.join(config.MAN_ANALYSIS_SAVE, filename)
        reqFile.save(path)
        return json.dumps({"status": "success", "path": path})
    except Exception as e:
        return json.dumps({"status": "except", "msg": "文件导入验证失败", 'e': str(e)})
