import scrapy
import cloudscraper

class AcsSpider(scrapy.Spider):
    """Fetch an ACS Publications search-results page through cloudscraper.

    pubs.acs.org sits behind Cloudflare, so the normal Scrapy downloader is
    bypassed: the page is fetched synchronously with a cloudscraper session
    inside ``start_requests`` and the raw HTML is written to
    ``acs_cloudscraper.html`` for offline inspection.
    """

    name = "toscrape-acs"

    custom_settings = {
        # The target path is disallowed by robots.txt; deliberately ignored here.
        "ROBOTSTXT_OBEY": False,
    }

    # Single shared cloudscraper session for the whole spider, configured once
    # with a consistent Chrome/Windows fingerprint.  (Previously a second,
    # differently-configured scraper was created inside start_requests and
    # this attribute was never used.)
    browser = cloudscraper.create_scraper(
        browser={'browser': 'chrome', 'platform': 'windows', 'mobile': False}
    )

    def start_requests(self):
        """Fetch the search page synchronously and save it to disk.

        Scrapy requires ``start_requests`` to yield ``scrapy.Request``
        objects; the original code yielded an ``HtmlResponse`` here, which
        makes the crawl engine raise at runtime.  The fetch, logging and
        file write all happen eagerly instead, and the method yields no
        requests.
        """
        url = "https://pubs.acs.org/action/doSearch?AllField=Thickness"

        headers = {
            "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
            "Accept-Language": "en-US,en;q=0.9",
            "Referer": "https://pubs.acs.org/",
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 "
                          "(KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36",
            "Sec-CH-UA": "\"Google Chrome\";v=\"120\", \"Not(A:Brand\";v=\"8\", \"Chromium\";v=\"120\"",
            "Sec-CH-UA-Mobile": "?0",
            "Sec-CH-UA-Platform": "\"Windows\"",
            "Upgrade-Insecure-Requests": "1",
            "Sec-Fetch-Site": "same-origin",
            "Sec-Fetch-Mode": "navigate",
            "Sec-Fetch-User": "?1",
            "Sec-Fetch-Dest": "document",
            "DNT": "1",
            "Cache-Control": "max-age=0",
            # NOTE(review): these session cookies (__cf_bm, JSESSIONID, ...) are
            # short-lived and will expire; cloudscraper normally solves and
            # manages __cf_bm itself — confirm this header is still required.
            "Cookie": "__cf_bm=uXtJTWOm7sOtxONu3eJXMYt_SDczVVSWAVUDwolDi1Y-1761015158-1.0.1.1-_6U9rlTwTGEeBYmKw_6M7hiG0PwQmh5CjeiJ2sNJ.mOC9nAHsO3xJ6m5oseXrrSIxt5Q6Hp_eJc64ksPNy6r5lDzFdW2YYekmxJV5BvhjzY;"
              "MAID=a0FWUnMc/3aji/UN6BpRpQ==;"
              "MACHINE_LAST_SEEN=2025-10-20T20:03:24.653-07:00;"
              "JSESSIONID=25E874D7FCDABF520F211F734F89B07A;"
              "ACSEnt=362414_6105_1760598636940"
        }

        # Synchronous fetch through the shared Cloudflare-aware session.
        resp = self.browser.get(url, headers=headers)
        self.logger.info("Status: %s", resp.status_code)

        # Wrap the raw HTML in a Scrapy response so selector-based parsing
        # (response.css / response.xpath) can be applied here if needed.
        response = scrapy.http.HtmlResponse(
            url=url,
            body=resp.text,
            encoding='utf-8',
            request=None,
        )

        # Persist the page for offline inspection.
        with open("acs_cloudscraper.html", "w", encoding="utf-8") as f:
            f.write(resp.text)

        # Keep this method a generator without emitting anything: the engine
        # only accepts Request objects here, and the work is already done.
        yield from ()
