"""
cron: 0 */4 * * *
new Env('招聘信息采集');
"""
import os
import time
from datetime import datetime

import mysql.connector
import requests
from bs4 import BeautifulSoup

# Read database credentials from environment variables (set in the
# scheduler's environment, e.g. a qinglong panel).
host = os.environ.get('HOST')
user = os.environ.get('USER')
password = os.environ.get('PASSWORD')
database = os.environ.get('DATABASE')

print("正在获取环境变量...")
print(f"数据库地址：{host}")
print(f"数据库用户名：{user}")
# Security fix: never print the raw password to the job log; only reveal
# whether it is set.
print(f"数据库密码：{'******' if password else None}")
print(f"数据库名称：{database}")

# Connect to MySQL. mysql.connector raises on bad/missing credentials,
# which aborts the script early — acceptable for a cron job.
connection = mysql.connector.connect(
    host=host,
    user=user,
    password=password,
    database=database
)

cursor = connection.cursor()

# Ensure the destination table exists; secondary indexes cover the
# columns most likely to be filtered on (location, job_type, create_time).
cursor.execute(
    """
CREATE TABLE IF NOT EXISTS ningm_job_info (
    id INT AUTO_INCREMENT PRIMARY KEY,
    job_title VARCHAR(200) NOT NULL,
    company_name VARCHAR(200),
    salary_range VARCHAR(100),
    location VARCHAR(100),
    experience VARCHAR(50),
    education VARCHAR(50),
    job_type VARCHAR(50),
    job_url VARCHAR(500),
    create_time DATETIME DEFAULT CURRENT_TIMESTAMP,
    INDEX idx_location (location),
    INDEX idx_job_type (job_type),
    INDEX idx_create_time (create_time)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
"""
)

# Every keyword/city combination to monitor on each run.
_KEYWORDS = ('Python', 'Java')
_CITIES = ('北京', '上海')
job_searches = [
    {'keyword': kw, 'city': city}
    for city in _CITIES
    for kw in _KEYWORDS
]

def get_job_list(keyword, city):
    """Fetch job postings from the Zhilian (zhaopin.com) search API and
    insert each row into the ningm_job_info table.

    Args:
        keyword: Search keyword (e.g. 'Python'); also stored as job_type.
        city: Value passed as the API's ``cityId`` parameter.
            NOTE(review): a city *name* is passed where the parameter name
            suggests a numeric id — confirm the API accepts names.

    Uses the module-level ``cursor``; the caller is responsible for
    committing. Any error is logged and swallowed so one failed search
    does not abort the whole run.
    """
    try:
        headers = {
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36',
            'Referer': 'https://www.zhaopin.com/'
        }

        # Zhilian search API endpoint.
        url = 'https://fe-api.zhaopin.com/c/i/sou'
        params = {
            'pageSize': 20,
            'cityId': city,
            'kw': keyword,
            'kt': 3
        }

        # Bug fix: without a timeout a stalled connection would hang the
        # cron job indefinitely.
        response = requests.get(url, headers=headers, params=params, timeout=15)

        if response.status_code == 200:
            data = response.json()
            jobs = data.get('data', {}).get('results', [])

            # Parameterized insert — values are never interpolated into SQL.
            insert_query = """
                INSERT INTO ningm_job_info 
                (job_title, company_name, salary_range, location, experience, 
                 education, job_type, job_url) 
                VALUES (%s, %s, %s, %s, %s, %s, %s, %s)
            """
            for job in jobs:
                job_title = job.get('jobName', '')
                company_name = job.get('company', {}).get('name', '')

                print(f"正在保存职位数据: {job_title} - {company_name}")
                cursor.execute(insert_query, (
                    job_title,
                    company_name,
                    job.get('salary', ''),
                    job.get('city', {}).get('display', ''),
                    job.get('workingExp', {}).get('name', ''),
                    job.get('eduLevel', {}).get('name', ''),
                    keyword,  # job_type mirrors the search keyword
                    job.get('positionURL', ''),
                ))

    except Exception as e:
        # Best-effort by design: log and let the caller move on to the
        # next keyword/city pair.
        print(f"获取职位数据失败: {str(e)}")

# Purge stale rows so the table only ever holds the last day of postings.
print("删除旧数据...")
delete_query = "DELETE FROM ningm_job_info WHERE create_time < DATE_SUB(NOW(), INTERVAL 1 DAY)"
cursor.execute(delete_query)

# Fetch and store the latest postings for every configured search.
print("开始获取招聘信息...")
try:
    for search in job_searches:
        get_job_list(search['keyword'], search['city'])
        # Throttle between searches to avoid hammering the API.
        time.sleep(2)
    # Commit the purge and all inserts together at the end of the run.
    connection.commit()
finally:
    # Bug fix: always release DB resources, even if a search or the
    # sleep raised — the original leaked the connection on any error.
    cursor.close()
    connection.close()

print("数据保存完成！") 