import requests


class AirPollutionCrawler:
    """Crawler for live city AQI data from the CNEMC public endpoint.

    Builds the request configuration in ``__init__`` and fetches the JSON
    payload in :meth:`get_data`, storing it on ``self.data``.
    """

    def __init__(self, city="北京市"):
        """Prepare request URL, headers and query parameters.

        Args:
            city: City name (in Chinese) used as the ``cityName`` query
                parameter. Defaults to "北京市".
        """
        self.url = "https://air.cnemc.cn:18007/CityData/GetAQIDataPublishLive"
        self.headers = {
            "Referer": "https://air.cnemc.cn:18007/",
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/123.0.0.0 Safari/537.36",
        }
        # Session cookies captured from a browser; currently unused by
        # get_data(), kept in case the endpoint starts requiring them.
        self.cookies = {
            "Hm_lvt_aadbcc83cc37610f46f503983c444e90": "1711543857,1711892802,1711892926",
            "Hm_lvt_0b8b0a2a4a45cbaaa6f549dcad3329a6": "1711543857,1711892802,1711892926",
            "Hm_lpvt_0b8b0a2a4a45cbaaa6f549dcad3329a6": "1711892969",
            "Hm_lpvt_aadbcc83cc37610f46f503983c444e90": "1711892969"
        }
        self.params = {
            "cityName": city
        }
        # Last fetched JSON payload; empty dict until get_data() succeeds.
        self.data = {}

    def get_data(self, timeout=10):
        """Fetch the live AQI JSON for the configured city.

        On success the decoded payload is printed, stored in ``self.data``
        and returned. On any request/decoding failure the error is printed
        and the previous ``self.data`` is returned unchanged (best-effort,
        matching the original non-raising behavior).

        Args:
            timeout: Seconds to wait for the server before giving up.
                Prevents the request from hanging indefinitely.

        Returns:
            dict/list: the decoded JSON payload, or the previous
            ``self.data`` on failure.
        """
        try:
            response = requests.get(
                self.url,
                headers=self.headers,
                params=self.params,
                timeout=timeout,
                # NOTE(review): TLS verification disabled — presumably the
                # endpoint's certificate does not validate; confirm before
                # shipping, as this allows man-in-the-middle interception.
                verify=False
            )
            # Surface HTTP errors (4xx/5xx) instead of trying to parse them.
            response.raise_for_status()
            self.data = response.json()
        except (requests.RequestException, ValueError) as e:
            # ValueError covers a non-JSON response body.
            print(e)
        else:
            print(self.data)
        return self.data

    def parse_data(self):
        """Extract fields of interest from ``self.data`` (not implemented)."""
        pass

    def save_data(self):
        """Persist parsed data to storage (not implemented)."""
        pass

    def run(self):
        """Execute the crawl pipeline; currently just fetches the data."""
        return self.get_data()


if __name__ == '__main__':
    # Crawl the default city when executed as a script.
    AirPollutionCrawler(city="北京市").run()
