import requests
from bs4 import BeautifulSoup
import pandas as pd

# Scrape the Pospal catering report table into a DataFrame and save it to Excel.
#
# SECURITY NOTE(review): the Cookie below is a live login credential hard-coded
# in source. Move it to an environment variable or a config file excluded from
# version control, and rotate the session before sharing this script.

# 1. Request headers carrying the logged-in session.
# Named `request_headers` (not `headers`) so it isn't shadowed by the table
# column headers extracted in step 4 below.
request_headers = {
    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/135.0.0.0 Safari/537.36 Edg/135.0.0.0",
    "Cookie": "autoShowedHelpCenter=1%7C1; .POSPALAUTH30220=0102CAF795EE2F78DD08FE4AC39ED23878DD08000835003700340030003100310034003A000000FF669A0C7A734A5315D26CCA5646579B1DB14DB400; pageOpenCount=18; loginVersionStrForPospal=7813119; ceMenuMode=1; browserUuidForPospal=dfbd8e33-0e37-4943-a4d2-26f1952c3897; sessionGuid=979e48e3-08ee-40cc-96e1-9898c9037e8a; storeIndustryForPospal=102; daysLimitForPospal=; bindTelExpired=1; lastLoginAccount=hrgl88; .POSPALAUTH30220=010200BB314E2A78DD08FE80863A323378DD08000835003700340030003100310034003A000000FF8C8DBFA55FD64E8FB19EB619470BA5B6725CCA9A; kefuOnlineTimesForPospal=%7B%22startTime%22%3A%2200%3A00%22%2C%22endTime%22%3A%2223%3A59%22%2C%22insertTimeStamp%22%3A1744290151395%7D"  # Must be a valid post-login cookie.
}

url = "https://beta72.pospal.cn/Dashboard/Catering"  # Replace with the target report page URL.
# timeout prevents an unresponsive server from hanging the script forever;
# raise_for_status fails fast on auth/HTTP errors instead of parsing an error page.
response = requests.get(url, headers=request_headers, timeout=30)
response.raise_for_status()

# 2. Parse the HTML.
soup = BeautifulSoup(response.text, 'html.parser')

# 3. Locate the data table (adjust the selector to the actual page structure).
table = soup.select_one("#reportTable")  # Assumes the table id is reportTable.
if table is None:
    # Without this guard the script dies later with an opaque AttributeError.
    raise RuntimeError(
        "Report table '#reportTable' not found - check the CSS selector "
        "or whether the login cookie has expired."
    )

# 4. Extract the column headers and the data rows.
headers = [th.text.strip() for th in table.select("tr th")]
rows = []
for tr in table.select("tr")[1:]:  # Skip the header row.
    cells = [td.text.strip() for td in tr.select("td")]
    if cells:  # Drop rows with no <td> cells (e.g. extra header/empty rows).
        rows.append(cells)

# 5. Convert to a DataFrame for analysis.
df = pd.DataFrame(rows, columns=headers)
print(df.head())

# 6. Save the data (example: Excel; requires openpyxl to be installed).
df.to_excel("sales_report.xlsx", index=False)