from scapy.all import *
import threading, time, re, os 
from selenium import webdriver
import shutil
from selenium.webdriver.firefox.firefox_profile import FirefoxProfile
import datetime
import SSHConnection

## ========================================================================== ##
## ------------------------ USER CONFIGURATION FLAGS ------------------------ ##
## ========================================================================== ##

### Local-machine settings
## Timing settings
access_pages=2                # number of capture runs per website
local_catch_traffic_time=21   # local sniff duration per run (seconds)
sniff_to_open=5               # delay between starting the sniff and opening the page (seconds)
page_duration=21              # how long each page stays open (seconds)
shutdown_wait=5               # wait after closing the page before the next run (seconds)

## Other settings
# text file listing the websites to visit (one URL per line)
websites='websites.txt'
# local IP address
local_IP='XXX.XXX.XXX.XXX'
# local network interface name (passed to scapy's sniff())
local_iface='Intel(R) Wireless-AC XXXX XXXMHz'

### Server settings
## Gateway
gateway_catch_traffic_time=21 # gateway tcpdump duration per run (seconds)
gateway_dict={'host':'XXX.XXX.XXX.XXX','port':2222,'username':'','pwd':''}
gateway_path='/media/work/XXX'

## Proxy server
server_catch_traffic_time=21  # proxy-server tcpdump duration per run (seconds)
server_IP=''
server_dict={'host':'XXX.XXX.XXX.XXX','port':22,'username':'','pwd':''}
server_path='/root/XXX'

## ========================================================================== ##
## -------------------- DO NOT EDIT THE FOLLOWING BLOCKS -------------------- ##
## ========================================================================== ##

# Local traffic capture (run as a thread target).
def catch_traffic(visit_index, site):
    """Sniff TCP traffic exchanged with the proxy server and save it to a pcap.

    Callers invoke this positionally via threading.Thread(args=(i, name)),
    matching the old ``*website`` order.

    Parameters:
      visit_index -- capture-run index (int), appended to the pcap file name
      site        -- site name; selects ./pcap/<site>/ and prefixes the file name

    Sniffs on ``local_iface`` for ``local_catch_traffic_time`` seconds and
    writes the packets to ./pcap/<site>/<timestamp><site><visit_index>.pcap.
    """
    # Keep only TCP traffic to/from the proxy server.
    traffic_filter = 'host ' + server_IP + ' and tcp'
    packets = sniff(filter=traffic_filter, iface=local_iface,
                    timeout=local_catch_traffic_time)
    stamp = datetime.datetime.now().strftime('%Y%m%d_%H%M_%S_')
    pcapname = './pcap/' + site + '/' + stamp + site + str(visit_index) + '.pcap'
    wrpcap(pcapname, packets)

# Remote capture command (run as a thread target).
def SSH_cmd(ssh, duration, remote_dir, use_sudo=False):
    """Start a timed tcpdump capture on a remote host over SSH.

    Callers pass the arguments positionally, matching the old ``*args`` order
    (ssh connection, capture time, remote path, sudo flag).

    Parameters:
      ssh        -- SSHConnection-like object exposing .cmd(command, sudo=...)
      duration   -- capture length in seconds (used for both `timeout` and -G)
      remote_dir -- remote directory receiving the rotated pcap files
      use_sudo   -- when True, prefix the command with sudo and call
                    ssh.cmd(..., sudo=True)
    """
    # NOTE(review): the '%Y+%m%d' strftime pattern looks like a typo for
    # '%Y_%m%d' (cf. server_cmd below) -- kept as-is to preserve behavior.
    cmd = ('timeout ' + str(duration) + ' tcpdump -s0 -G ' + str(duration)
           + ' -w ' + remote_dir + '/%Y+%m%d_%H%M_%S.pcap')
    if use_sudo:
        ssh.cmd('sudo ' + cmd, sudo=True)
    else:
        ssh.cmd(cmd)

# Browser setup.
def create_driver():
    """Create a Firefox WebDriver that routes all traffic through the local
    Tor SOCKS proxy.

    The profile:
      * uses manual proxy configuration via SOCKS at 127.0.0.1:9050,
      * disables memory/disk caching so repeated visits re-fetch everything,
      * allows .onion hosts and resolves DNS through the proxy.

    Returns the configured webdriver.Firefox instance.
    """
    firefox_profile = FirefoxProfile()

    # Manual proxy configuration (network.proxy.type=1) pointing at Tor.
    proxy = '127.0.0.1:9050'
    ip, port = proxy.split(":")
    settings = {
        'network.proxy.type': 1,
        'network.proxy.socks': ip,
        'network.proxy.socks_port': int(port),
    }
    for key, value in settings.items():
        firefox_profile.set_preference(key, value)

    # Disable every browser cache so each visit generates full network traffic.
    firefox_profile.set_preference("browser.cache.memory.enable", False)
    firefox_profile.set_preference("browser.cache.disk.enable", False)
    firefox_profile.set_preference("browser.cache.check_doc_frequency", 1)
    firefox_profile.set_preference("network.http.use-cache", False)
    firefox_profile.set_preference("browser.sessionhistory.max_total_viewers", 3)
    firefox_profile.set_preference("network.dns.disableIPv6", True)
    # BUG FIX: preference keys are case-sensitive; was "Content.notify.interval"
    # (cf. lowercase "content.notify.backoffcount" on the next line).
    firefox_profile.set_preference("content.notify.interval", 750000)
    firefox_profile.set_preference("content.notify.backoffcount", 3)

    # Allow .onion addresses and push DNS resolution through the SOCKS proxy
    # so lookups do not leak outside Tor.
    firefox_profile.set_preference("network.dns.blockDotOnion", False)
    firefox_profile.set_preference("network.proxy.socks_remote_dns", True)

    firefox_profile.update_preferences()
    # BUG FIX: the original immediately overwrote this driver with a second,
    # unconfigured webdriver.Firefox(), discarding the Tor-proxy profile and
    # leaking a browser instance.
    return webdriver.Firefox(firefox_profile)

# Create the (Tor-proxied) browser shared by all page visits.
driver = create_driver()

# Open the SSH connections to the gateway and the proxy server.
gatewaySSH=SSHConnection.SSHConnection(gateway_dict)
gatewaySSH.connect()

serverSSH =SSHConnection.SSHConnection(server_dict)
serverSSH.connect()

# NOTE(review): server_cmd is built here but never used in this file --
# remote captures are launched through SSH_cmd() instead.
server_cmd=';sudo tcpdump -s0 -G ' + str(server_catch_traffic_time) + ' -w %Y_%m%d_%H%M_%S.pcap'

# Main capture loop: for each website, (re)create the local and remote pcap
# directories, then repeatedly sniff traffic locally, on the gateway, and on
# the proxy server while the page is visited.
with open(websites, 'r') as f:
    pages = f.readlines()

    for page in pages:
        # BUG FIX: strip the trailing newline instead of replacing it with a
        # space, which used to leave a blank inside the URL passed to the
        # browser's window.open().
        page = page.strip()
        # Site name = text between 'http://' and the first dot.
        # NOTE(review): the final non-greedy (.*?) at the end of the pattern
        # always matches the empty string; only group(2) is used.
        pattern = r'(http://)(.*?)\.(.*?)'
        s = re.search(pattern, page)
        # BUG FIX: skip blank or malformed lines instead of crashing with
        # AttributeError when the pattern does not match.
        if s is None:
            continue
        site = s.group(2)

        # (Re)create the local per-site pcap directory.
        local_dir = './pcap/' + site
        if os.path.exists(local_dir):
            shutil.rmtree(local_dir)
        os.makedirs(local_dir)  # also creates ./pcap if it does not exist yet

        # Create the matching directories on the gateway and the proxy server.
        gpage_path = gateway_path + '/' + site
        gatewaySSH.cmd('sudo mkdir ' + gpage_path, sudo=True)
        spage_path = server_path + '/' + site
        serverSSH.cmd('mkdir ' + spage_path)

        # Capture each website access_pages times.
        for i in range(access_pages):
            # Start the three capture threads: local sniff, gateway tcpdump,
            # proxy-server tcpdump.
            catch_traffic_thread = threading.Thread(target=catch_traffic,
                                                    args=(i, site))
            catch_traffic_thread.start()

            gateway_thread = threading.Thread(
                target=SSH_cmd,
                args=(gatewaySSH, gateway_catch_traffic_time, gpage_path, True))
            gateway_thread.start()

            server_thread = threading.Thread(
                target=SSH_cmd,
                args=(serverSSH, server_catch_traffic_time, spage_path, False))
            server_thread.start()

            # Give the sniffers a head start before opening the page.
            time.sleep(sniff_to_open)

            # Open the page in a new tab so the capture covers the full load.
            driver.switch_to.window(driver.window_handles[0])
            js = "window.open('" + page + "')"
            driver.execute_script(js)
            # Keep the page open for page_duration seconds.
            time.sleep(page_duration)

            # Close the newest tab (the one just opened).
            driver.switch_to.window(driver.window_handles[-1])
            driver.close()

            # Let the remote captures drain before the next run.
            time.sleep(shutdown_wait)
 
