from sklearn.externals import joblib
import re

# Load the pre-trained XSS-detection SVM from disk at import time.
# NOTE(review): `sklearn.externals.joblib` was deprecated in scikit-learn 0.21
# and removed in 0.23 -- on newer environments this file needs `import joblib`
# at the top instead.  The absolute Windows path also ties the module to one
# machine; consider making it configurable (env var / argument).
svmclf = joblib.load("C:\\Users\\LENOVO\\Desktop\\machineLearning\\Code\\testSite\\myDetector\\detectorTools\\xss-svm-module.m")
print("svm loaded...")

def get_len(url):
    """Feature: total character length of the input string."""
    url_length = len(url)
    return url_length

def get_url_count(url):
    """Binary feature: 1 when the string embeds an http:// or https://
    scheme (case-insensitive), 0 otherwise."""
    has_scheme = re.search('(http://)|(https://)', url, re.IGNORECASE)
    return 1 if has_scheme else 0

def get_evil_char(url):
    """Feature: number of characters commonly used to break out of HTML
    context in XSS payloads (< > , ' \" /)."""
    hits = re.findall("[<>,\'\"/]", url, re.IGNORECASE)
    return len(hits)

def get_evil_word(url):
    return len(re.findall("(alert)|(script=)(%3c)|(%3e)|(%20)|(onerror)|(onload)|(eval)|(src=)|(prompt)",url,re.IGNORECASE))

def get_last_char(url):
    """Binary feature: 1 when the regex ``/$`` matches (a trailing ``/``,
    possibly before a final newline), 0 otherwise.

    Kept as a regex on purpose: ``$`` also matches just before a trailing
    newline, which ``str.endswith('/')`` would not replicate.
    """
    return 1 if re.search('/$', url, re.IGNORECASE) else 0

def get_feature(url):
    """Build the full 5-element feature vector for one input string.

    NOTE(review): etl() below builds only the first four of these features;
    confirm which width the loaded SVM model actually expects.
    """
    extractors = (get_len, get_url_count, get_evil_char, get_evil_word, get_last_char)
    return [extract(url) for extract in extractors]

def etl(raw, data):
    """Append a 4-feature row to ``data`` for every string in ``raw``.

    ``data`` is mutated in place and also returned for convenience.
    Row layout: [length, has_url_scheme, evil_char_count, evil_word_count].
    """
    for line in raw:
        row = [
            get_len(line),
            get_url_count(line),
            get_evil_char(line),
            get_evil_word(line),
        ]
        data.append(row)
    return data
def getMaliciousString(article, predict, record):
    """Collect the input strings the classifier flagged as malicious.

    Parameters:
        article: sequence of the original input strings.
        predict: per-item labels aligned with ``article`` (1 == malicious).
        record:  list that flagged entries are appended to (mutated in place).

    Returns:
        The same ``record`` list, with one "svm:<text>;" entry per label 1.
    """
    # zip pairs each string with its label -- clearer than indexing via
    # range(len(predict)), and f-string formatting replaces "+" concatenation.
    for text, label in zip(article, predict):
        if label == 1:
            record.append(f"svm:{text};")
    return record
if __name__ == '__main__':
    # Smoke test: a few benign strings (expected label 0) followed by two
    # obvious XSS payloads, pushed through the feature pipeline and the SVM.
    articles = [
        'agb,bvhdlajdgh.',
        'haukfjq38947,"jdia!".',
        '789465132',
        '<div>Leave your message</div>',
        '<td id="paramName">{{paramName}}</td><td id="paramValue">{{value}}</td><td id="funcBtn"><div onclick=ajaxRequest(this,"confirm")>confirm</div></td>',
        '%3Cimg%20src%3Dfoo.png%20onerror%3Dalert%28/xssed/%29%20/%3E%20%20%20%0A',
    ]
    data = etl(articles, [])
    predict = svmclf.predict(data)
    record = []
    print(svmclf.predict_log_proba(data))
    print(getMaliciousString(articles, predict, record))