#!/usr/bin/env python
# coding=utf-8
"""
从stanford neural parser解析文本得到的dependency graph中提取POS tag为NNP（专有名词）的词汇作为其影响软件的候选
"""
import xmlrpclib
import networkx
from networkx.algorithms.traversal.breadth_first_search import bfs_edges
from DependencyGraph import GenerateGraph
import re

def find_compound_chain(G, node):
    """Return *node* followed by every word reachable from it through
    'compound' dependency edges, gathered depth-first in edge order."""
    chain = [node]
    for _head, dependent, tag in G.edges(node, data="tag"):
        if tag == "compound":
            chain += find_compound_chain(G, dependent)
    return chain

def find_nmod_chain(G, node):
    head = find_compound_chain(G,node)[::-1]
    for edge in G.edges(node, data="tag"):
        if edge[2] == "case":
            head = (edge[1], head)
    
    result = [head]

    for edge in G.edges(node, data="tag"):
        if edge[2] == "nmod":
            result.extend(find_nmod_chain(G, edge[1]))
    return result

def extract_nmod_chain(text, server_url="http://10.141.209.137:28000/"):
    """Parse *text* with the remote Stanford parser and return its nmod chain.

    :param text: sentence (e.g. a CVE summary) to parse.
    :param server_url: XML-RPC endpoint of the parser service; the default
        preserves the previously hard-coded address for existing callers.
    :returns: the list produced by find_nmod_chain(), starting from the
        root's nsubj dependent (or the root itself when no nsubj exists).
    """
    proxy = xmlrpclib.ServerProxy(server_url)
    # Each entry looks like [[head, POS], relation, [dependent, POS]],
    # e.g. [['overflow', 'NN'], 'compound', ['buffer', 'NN']].
    origin_data = proxy.parse(text)

    # Build a directed graph over (word, POS) tuples so identical surface
    # words with different tags remain distinct nodes.
    G = networkx.DiGraph()
    for head, relation, dependent in origin_data:
        G.add_edge(tuple(head), tuple(dependent), tag=relation)

    # A dependency tree's topological order begins at its root.
    root = list(networkx.topological_sort(G))[0]

    # Prefer the nominal subject of the root as the starting point; fall
    # back to the root itself when no nsubj edge is present.
    start_node = root
    for _head, dependent, tag in G.edges(root, data="tag"):
        if tag == "nsubj":
            start_node = dependent

    return find_nmod_chain(G, start_node)
    
def filterEntity(nmod_chain):
    '''
    Keep noun phrases introduced by the preposition "in" (e.g. "in Apache2"),
    filtering out function names such as split_redraw and file names such as
    split.c, split.cpp or Split.java.

    :param nmod_chain: output of find_nmod_chain(); entries are either bare
        chains (ignored) or (preposition, chain) pairs where the chain is a
        list of (word, POS) tuples.
    :returns: flat list of candidate entity words tagged NN or NNP.
    '''
    result = []
    # BUG FIX: the original list was missing a comma after the ``.cpp``
    # pattern, so the ``.cpp`` and ``.java`` patterns were implicitly
    # concatenated into one regex that could never match, letting Java
    # file names through.  The dots are also escaped so they only match
    # a literal '.' rather than any character.
    re_list = [
        r"^([a-zA-Z0-9]+_)+[a-zA-Z0-9]+$",  # underscore-joined identifiers (function names)
        r"^\w+\.c$",                        # C source file names
        r"^\w+\.cpp$",                      # C++ source file names
        r"^\w+\.java$",                     # Java source file names
    ]
    for item in nmod_chain:
        # Bare chains (no preposition) of a single node have length 1.
        if len(item) == 1:
            continue
        preposition = item[0][0]
        if preposition != "in":
            continue
        # Only nouns are entity candidates.
        target_list = [word for word, pos in item[1] if pos == "NN" or pos == "NNP"]
        # Phrases like "the foo function" / "the bar file" name code
        # locations, not software products.
        if "function" in target_list:
            continue
        if "file" in target_list:
            continue
        # Skip the whole phrase if any word looks like a function or
        # source-file name.
        if any(re.match(regex, target)
               for target in target_list for regex in re_list):
            continue
        result.extend(target_list)
    return result

def ExtractEntity(text):
    """Parse *text* and return the filtered entity candidates.

    Any failure (parser unreachable, unexpected graph shape, ...) is
    reported via a printed traceback and yields an empty list.
    """
    try:
        return filterEntity(extract_nmod_chain(text))
    except Exception:
        import traceback
        traceback.print_exc()
        return []

# text1 = "lib/curl.rb in the Curl Gem for Ruby allows remote attackers to execute arbitrary commands via shell metacharacters in a URL."
# text = "The cURL library (libcurl) in PHP 4.4.2 and 5.1.4 allows attackers to bypass safe mode and read files via a file:// request containing null characters."
# print extract_nmod_chain(text1)

# for info in session.query(CVE.id, CVE.summary)[:10]:
#     cve_id, summary = info
#     print cve_id, extract_nmod_chain(summary)

if __name__ == "__main__":
    text = "Stack-based buffer overflow in the split_redraw function in split.c in mtr before 0.73, when invoked with the -p (aka --split) option, allows remote attackers to execute arbitrary code via a crafted DNS PTR record. NOTE: it could be argued that this is a vulnerability in the ns_name_ntop function in resolv/ns_name.c in glibc and the proper fix should be in glibc; if so, then this should not be treated as a vulnerability in mtr."
    # text = "An issue was discovered on Mimosa Client Radios before 2.2.4 and Mimosa Backhaul Radios before 2.2.4. On the backend of the device's web interface, there are some diagnostic tests available that are not displayed on the webpage; these are only accessible by crafting a POST request with a program like cURL. There is one test accessible via cURL that does not properly sanitize user input, allowing an attacker to execute shell commands as the root user."
    # text = "Multiple stack-based buffer overflows in the RTF import feature in AbiWord before 2.2.11 allow user-assisted attackers to execute arbitrary code via an RTF file with long identifiers, which are not properly handled in the (1) ParseLevelText, (2) getCharsInsideBrace, (3) HandleLists, (4) or (5) HandleAbiLists functions in ie_imp_RTF.cpp, a different vulnerability than CVE-2005-2964."
    # text = "389-ds-base version before 1.3.5.19 and 1.3.6.7 are vulnerable to password brute-force attacks during account lockout due to different return codes returned on password attempts."
    # text = """The ap_pregsub function in server/util.c in the Apache HTTP Server 2.0.x through 2.0.64 and 2.2.x through 2.2.21, when the mod_setenvif module is enabled, does not restrict the size of values of environment variables, which allows local users to cause a denial of service (memory consumption or NULL pointer dereference) via a .htaccess file with a crafted SetEnvIf directive, in conjunction with a crafted HTTP request header, related to (1) the "len +=" statement and (2) the apr_pcalloc function call, a different vulnerability than CVE-2011-3607."""
    # text = """When under stress, closing many connections, the HTTP/2 handling code in Apache httpd 2.4.26 would sometimes access memory after it has been freed, resulting in potentially erratic behaviour."""

    # result = [[('HTTP/2', 'NN'), ('handling', 'VBG'), ('code', 'NN')], (('in', 'IN'), [('Apache', 'NNP'), ('httpd', 'NN')])]
    result = extract_nmod_chain(text)
    print result

    print filterEntity(result)
