from eolcrawl.spiders.comspider import ComlistSpider
from eolcrawl.spiderutils.common import build_url
import os
from eolcrawl.spiders.comspider import set_spider_feeds
import scrapy
import re
from playwright.async_api import Page

# https://gitee.com/login?redirect_to_url=https%3A%2F%2Fgitee.com%2Forganizations%2Fagiros%2Fprojects
class AgirosRealseCodeSpider(ComlistSpider):
    """Spider that logs into gitee.com with Playwright, then crawls agiros
    release-code pages.

    Flow: ``start_requests`` issues a Playwright-enabled request for the
    login page; ``login`` fills and submits the form on the live page;
    subsequent responses are handled by ``parse``.
    """

    name = "agiros_realse_code_login"
    allowed_domains = ["gitee.com"]
    start_urls = ["https://gitee.com/login"]
    site_name = "gitee.com"
    category = ""
    type = 'query_spider'
    # Branch / repo prefix are configurable through the environment.
    branch = os.getenv("ROS_BRANCH", "humble")
    ros_prefix = os.getenv("ROS_REPOS_NAME", "ros")

    def __init__(self, name=None, **kwargs):
        super(AgirosRealseCodeSpider, self).__init__(name, **kwargs)
        self.logger.info(f"======{self.ros_prefix}======{self.branch}=========")
        self.start_request_callback = self.login  # use login() as the entry callback

    @classmethod
    def from_crawler(cls, crawler, *args, **kwargs):
        """Build the spider and attach feed-export settings."""
        spider = super(AgirosRealseCodeSpider, cls).from_crawler(crawler, *args, **kwargs)
        set_spider_feeds(spider, crawler)
        return spider

    def start_requests(self):
        """Yield a Playwright-enabled request for each start URL."""
        for url in self.start_urls:
            yield scrapy.Request(
                url=url,
                meta={
                    "playwright": True,
                    # login() needs the live Page object to drive the form.
                    "playwright_include_page": True,
                },
                callback=self.start_request_callback,
            )

    async def scroll_page(self, page):
        """Scroll to the bottom of *page* and wait 1s for lazy content to load."""
        await page.evaluate('window.scrollTo(0, document.body.scrollHeight)')
        await page.wait_for_timeout(1000)

    async def login(self, response):
        """Fill and submit the gitee login form on the live Playwright page.

        Bug fix: the async Playwright API returns coroutines, so every
        fill()/click()/wait_for_timeout() call must be awaited — the original
        synchronous calls never executed.  Likewise, after submitting we must
        read ``await page.content()``; ``response.body`` still holds the
        pre-login HTML snapshot.
        """
        page: Page = response.meta["playwright_page"]
        # Credentials are read from the environment; the defaults preserve the
        # previous hard-coded values.  NOTE(review): avoid committing real
        # credentials to source control — prefer setting GITEE_USER/GITEE_PASSWORD.
        await page.locator("#user_login").fill(os.getenv("GITEE_USER", "4384213@qq.com"))
        await page.locator("#user_password").fill(os.getenv("GITEE_PASSWORD", "JHQ@123"))
        await page.locator(".ui.fluid.orange.submit.button.large").click()
        await page.wait_for_timeout(2000)
        # "个人主页" ("personal homepage") only appears once logged in.
        content = await page.content()
        find = re.findall("个人主页", content)
        self.logger.info(f"find:{find}")

    def parse(self, response, **kwargs):
        """Log whether the logged-in marker appears in the response body."""
        find = re.findall("个人主页", response.body.decode())
        self.logger.info(f"find:{find}")
