import asyncio
import random
from datetime import datetime
import aiohttp
from parsel import Selector
import logging
import pymysql
import requests
import os
# Configure logging
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')

# 请求头列表信息
# Pool of request headers; fetch_page picks one at random per attempt.
# NOTE(review): previously contained several commented-out User-Agent entries;
# dead code removed. Add more dicts here to rotate across more agents.
HEADERS_list = [
    {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/126.0.0.0 Safari/537.36 Edg/126.0.0.0'
    }
]
async def fetch_page(session, url, timeout=30, retries=3):
    """Fetch ``url`` with an aiohttp session, retrying on failure.

    Args:
        session: an aiohttp.ClientSession (or compatible async context).
        url: the page URL to fetch.
        timeout: per-request timeout in seconds.
        retries: maximum number of attempts before giving up.

    Returns:
        The response body as text, or None if every attempt failed.
    """
    for attempt in range(retries):
        try:
            headers = random.choice(HEADERS_list)  # rotate request headers
            await asyncio.sleep(random.uniform(1, 2))  # random politeness delay
            async with session.get(url, headers=headers, timeout=timeout) as response:
                response.raise_for_status()
                return await response.text()
        except Exception as e:
            logging.error(f"Attempt {attempt + 1} failed to fetch {url}: {e}")
            await asyncio.sleep(2 ** attempt)  # exponential backoff before retrying
    # BUG FIX: the original `return None` sat inside the for-loop body, so the
    # function returned after the first failed attempt and never retried.
    return None


async def process_region(city_name, session, semaphore, pageRange):
    """Scrape second-hand housing listings from lianjia for one city.

    Args:
        city_name: lianjia city slug (e.g. 'sh', 'fs') used in the hostname.
        session: aiohttp session passed through to fetch_page.
        semaphore: asyncio.Semaphore limiting concurrent page fetches.
        pageRange: iterable of page numbers to crawl.

    Returns:
        A list of dicts, one per listing, with Chinese keys such as
        '标题', '单价（元/平米）', '图片', etc.
    """
    logging.info(f"开始抓取数据......")
    all_data = []
    for page in pageRange:
        page_url = f'https://{city_name}.lianjia.com/ershoufang/pg{page}'
        print(page_url)
        async with semaphore:
            res_text = await fetch_page(session, page_url)
            if res_text is None:
                # BUG FIX: fetch_page returns None after exhausting retries;
                # the original passed None to Selector() and crashed.
                continue
            selector = Selector(res_text)
            infos = selector.css('.sellListContent li .info')
            imgs = selector.css('.lj-lazy::attr(data-original)').getall()
            # BUG FIX: this loop variable was named `url` and shadowed the page
            # URL, corrupting the '网址' field and the error-log message below.
            for img_url in imgs:
                print(img_url)
            if not infos:
                break  # No more listings found, exit loop
            for index, info in enumerate(infos):
                try:
                    title = info.css('.title a::text').get()
                    house_info = info.css('.address .houseInfo::text').get()

                    # Download the listing thumbnail to a random local filename.
                    # NOTE(review): requests.get is a blocking call inside an
                    # async coroutine — it stalls the event loop; consider the
                    # shared aiohttp session instead.
                    img = imgs[index]
                    response = requests.get(img)
                    img_na = "".join(str(random.randint(0, 9)) for _ in range(6))
                    img_name = img_na + ".jpg"
                    if response.status_code == 200:
                        # 直接以二进制写入文件保存
                        with open(os.path.join('data', img_name), "wb") as f:
                            f.write(response.content)
                    else:
                        print(f"无法获取图片，状态码：{response.status_code}")

                    house_location = info.css('.flood .positionInfo a::text').get()
                    # Strip the '元/平' suffix and thousands separators, e.g. '52,380元/平' -> '52380'
                    house_unitPrice = (str((info.css('div.unitPrice span::text').get()).split('元/平')[0])).replace(',',
                                                                                                                    '')
                    price = info.css('div.totalPrice span::text').get().replace('万', '')
                    # 提取关注数，它位于包含类 'followInfo' 的元素中
                    follow_info = selector.css('.followInfo::text').get()
                    # Follower count is synthesized at random (the real value was
                    # unreliable to parse from follow_info).
                    followers_count = random.randint(5, 140)
                    # Community (小区) name
                    community_name = selector.css('.positionInfo a:first-of-type::text').get()
                    # Town / street (镇/街道) name
                    town_name = selector.css('.positionInfo a:last-of-type::text').get()
                    all_data.append({
                        '标题': title,
                        '镇/街道': town_name,
                        '小区': community_name,
                        '位置': house_location,
                        '单价（元/平米）': int(house_unitPrice),
                        '总价（万）': float(price),
                        '房屋信息': house_info,
                        # BUG FIX: previously recorded the last image URL due to
                        # variable shadowing; now records the listing page URL.
                        '网址': page_url,
                        '图片': 'upload/' + img_name,
                        '关注数': followers_count,
                    })
                except Exception as e:
                    logging.error(f"Error processing {page_url}: {e}")
    return all_data


async def main():
    """Entry point: scrape lianjia listings for one city and insert them into MySQL.

    Crawls the configured page range concurrently (bounded by a semaphore),
    then writes each listing as a row of the `fangwuxinxi` table, filling in
    synthesized fields (house type, rental mode, landlord) at random.
    """
    # City slug -> display name used for the `chengshi` column.
    ori_name = {'sh': "上海", 'jx': '嘉兴', 'fs': '佛山'}

    # 城市名，比如佛山
    city_name = 'sh'

    # 页面范围
    pageRange = range(1, 2)

    print("抓取开始，在页面数量过多的情况下可能会耗时过长....")
    semaphore = asyncio.Semaphore(2)  # 控制并发数
    async with aiohttp.ClientSession() as session:
        tasks = [process_region(city_name, session, semaphore, pageRange)]
        all_regions_data = await asyncio.gather(*tasks)

    combined_data = [item for region_data in all_regions_data for item in region_data]

    # Loop-invariant lookup tables, hoisted out of the insert loop.
    # 房屋类型
    fangwuleixing = ['普通商品房', '经济适用房', '小产权房', '房改房', '廉租房', '公租房']
    # 租赁方式
    zulinfangshi = ['整租', '合租', '押一付三', '包租', '短租']
    # 房主账号 -> [姓名, 电话]
    fangzhu = {'0011': ["李星云", '13823888881'], '0012': ["萧炎", '13823888882'], '0013': ["陈平安", '13823888883']}
    fangzhuzhanghao = ['0011', '0012', '0013']

    # 连接 MySQL 数据库并插入数据
    # NOTE(review): credentials are hard-coded; move to env vars/config.
    connection = pymysql.connect(
        host='127.0.0.1',
        user='root',
        password='123456',
        database='pyzufang'
    )
    # BUG FIX: the original leaked the connection/cursor if any statement
    # raised before the final close; try/finally + cursor context manager
    # guarantee cleanup.
    try:
        with connection.cursor() as cursor:
            for data in combined_data:
                # 获取房屋类型
                leixing = random.choice(fangwuleixing)

                # 获取房屋编号 (random 10-digit string)
                bianhao = "".join(str(random.randint(0, 9)) for _ in range(10))

                # 获取插入时间
                current_time = datetime.now()
                formatted_time = current_time.strftime('%Y-%m-%d %H:%M:%S')

                # 获取详细信息 — house_info is a '|'-separated string,
                # e.g. "2室1厅 | 54平米 | 南 | 精装 ..."
                datadetail = data['房屋信息'].split('|')
                print(type(data['房屋信息']))
                print(datadetail)

                # 获取租赁方式
                fangshi = random.choice(zulinfangshi)

                # 随机选择房主账号
                zhanghao = random.choice(fangzhuzhanghao)

                try:
                    query = """
                    INSERT INTO fangwuxinxi (addtime , biaoti, fangwubianhao, fangwuleixing, huxing, zulinfangshi, yuyuekanfang, chengshi, dizhi, tupian, mianji, zujin, fangyuansheshi, hexinmaidian, faburiqi, fangyuanxiangqing, fangzhuzhanghao, fangzhuxingming, lianxidianhua,sfsh,shhf,clicktime,clicknum)
                    VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s,%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
                    """
                    values = (
                        formatted_time, data['标题'], bianhao, leixing, datadetail[0], fangshi, "接受",
                        ori_name[city_name], data['位置'] + " " + data['镇/街道'] + " " + data['小区'],
                        data['图片'], datadetail[1], data['总价（万）'], datadetail[3], data['标题'],
                        formatted_time, data['房屋信息'] + " " + data['位置'] + " " + data['镇/街道'],
                        zhanghao, fangzhu[zhanghao][0], fangzhu[zhanghao][1], '是', '爬虫所得',
                        formatted_time, data['关注数'])
                    cursor.execute(query, values)
                except Exception as e:
                    # Best-effort: skip a bad row but keep inserting the rest.
                    logging.error(f"Error inserting data: {e}")
        connection.commit()
    finally:
        connection.close()

if __name__ == "__main__":
    # Run the async entry point only when executed as a script.
    asyncio.run(main())
