import os
import time
import subprocess
import json
from openpyxl import Workbook
from selenium import webdriver
from selenium.webdriver.chrome.service import Service
from selenium.webdriver.chrome.options import Options
from mitmproxy import http
from mitmproxy import ctx

# Global variable to store captured data.
# Each element is a dict produced by the request() hook below
# ({'Timestamp', 'Cookie', 'User-Agent', 'Referer', 'Videos': [...]})
# and is later flattened into Excel rows by write_to_excel().
captured_data = []

# Function to start mitmdump
def start_mitmdump():
    """Start mitmdump as a silent background process.

    Returns the subprocess.Popen handle so the caller can terminate the
    proxy on exit (returns None if the process failed to start).
    """
    try:
        with open(os.devnull, 'w') as devnull:
            # Use 'mitmdump' (headless CLI) — 'mitmproxy' is an interactive
            # TUI and cannot run detached with its output discarded.
            proc = subprocess.Popen(
                ['mitmdump', '--mode', 'regular'],
                stdout=devnull,  # discard output
                stderr=devnull,
            )
        print('------------------------------')
        return proc
    except Exception as e:
        print(f"启动 mitmproxy 失败: {str(e)}")
        return None

# Function to set system proxy
def set_system_proxy():
    """Route system (WinHTTP) traffic through the local mitmproxy.

    A single `netsh winhttp set proxy` call covers both HTTP and HTTPS
    (the setting is one proxy for all WinHTTP traffic, so issuing the
    command twice was redundant).  os.system never raises when the
    command itself fails — it returns the exit status — so the status
    is checked explicitly instead of relying on a dead try/except.
    """
    status = os.system('netsh winhttp set proxy 127.0.0.1:8080')
    if status != 0:
        # Non-zero exit: netsh missing (non-Windows) or lacked privileges.
        print(f"设置系统代理失败: exit status {status}")

# Function to capture requests
def request(flow: http.HTTPFlow) -> None:
    """mitmproxy hook: record Douyin 'aweme/post' API traffic.

    Appends one dict (timestamp, request headers, video URL/description
    pairs) to the module-global `captured_data` list.

    NOTE(review): this logic reads `flow.response`, which is None during
    the request phase — as written, the original raised AttributeError
    on every matching flow.  The guard below prevents the crash, but the
    body only produces data when run as a `response` hook; confirm the
    intended mitmproxy event and rename accordingly.
    """
    if not flow.request.pretty_url.startswith(
        "https://www.douyin.com/aweme/v1/web/aweme/post/?device_platform=webapp"
    ):
        return
    # No response yet (request phase) or empty body: nothing to parse.
    if flow.response is None or not flow.response.text:
        return
    try:
        aweme_list = json.loads(flow.response.text).get('aweme_list', [])
    except ValueError:
        # Non-JSON body — skip instead of crashing the addon.
        return
    data = {
        'Timestamp': time.strftime('%Y-%m-%d %H:%M:%S', time.localtime()),  # formatted capture time
        'Cookie': flow.request.headers.get('Cookie'),
        'User-Agent': flow.request.headers.get('User-Agent'),
        'Referer': flow.request.headers.get('Referer'),
        'Videos': [
            {
                # Last entry of url_list — presumably the preferred CDN URL;
                # TODO confirm against the API schema.
                'URL': video['play_addr']['url_list'][-1],
                'Description': video.get('desc', ''),  # video caption text
            }
            for video in aweme_list
        ],
    }
    captured_data.append(data)

# Function to write data to Excel
def write_to_excel():
    """Export all captured entries to 'captured_data.xlsx'.

    Emits a header row, then flattens each captured entry into one
    spreadsheet row per video (the entry's headers are repeated on
    every row of that entry).
    """
    workbook = Workbook()
    sheet = workbook.active
    sheet.append(['Timestamp', 'Cookie', 'User-Agent', 'Referer', 'Video URLs', 'Descriptions'])  # Header row

    flattened = (
        [item['Timestamp'], item['Cookie'], item['User-Agent'],
         item['Referer'], clip['URL'], clip['Description']]
        for item in captured_data
        for clip in item['Videos']
    )
    for row in flattened:
        sheet.append(row)

    workbook.save('captured_data.xlsx')

# Main entry point: start the proxy, point Chrome at the target page,
# wait for the mitmproxy hook to collect data, export it, then clean up.
if __name__ == "__main__":
    start_mitmdump()
    set_system_proxy()

    # Set up Chrome options
    options = webdriver.ChromeOptions()
    # options.add_argument('--headless')  # headless mode (disabled)
    options.add_argument('--disable-gpu')  # disable GPU (optional)
    # Reuse the existing Chrome profile (logged-in session/cookies).
    # The '--user-data-dir=' flag is required: passing the bare path
    # (as the original did) hands Chrome a meaningless argument.
    options.add_argument('--user-data-dir=C:/Users/姚望/AppData/Local/Google/Chrome/User Data')
    driver = webdriver.Chrome(options=options)

    try:
        # Open the Douyin user page
        driver.get("https://www.douyin.com/user/MS4wLjABAAAAiT1NQP7pWcEZBjQtvrxvNTo_YZkcJ1UZDnxoVkeIJZQ")

        # Wait for data to be captured
        time.sleep(880)

        # Write captured data to Excel
        write_to_excel()
    finally:
        # Always release the browser, even if navigation or export failed.
        driver.quit()
        # Undo the WinHTTP proxy configured above.  Clearing the
        # http_proxy/https_proxy environment variables (as the original
        # did) has no effect on the netsh/WinHTTP setting.
        os.system('netsh winhttp reset proxy')
