# NOTE(review): the commented-out block below appears to be an earlier standalone
# version of this crawler, presumably superseded by Crawler_data.Today_crawler
# (imported at the bottom of the file) — confirm before deleting.
# import requests
# import urllib
# from ast import literal_eval
#
#
# def write(data):
#     try:
#         url = "http://127.0.0.1:5000/Today/Event/write"
#         headers = {
#             "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/116.0.0.0 Safari/537.36"
#         }
#         Request_write = urllib.request.Request(url=url,headers = headers, data=data)
#         urllib.request.urlopen(Request_write)
#         return "写入成功"
#     except Exception as err:
#         return print(err)
#
# if __name__ == "__main__":
#     url = "https://today.help.bj.cn/read/?page=2&pagesize=20&month=9&day=22"
#     response = requests.get(url)
#     data = response.text
#     data= data.replace('"康州英雄"','‘康州英雄’')
#
#     Requise_data = literal_eval(data)
#     for item_ in range(len(Requise_data)):
#         data = {
#             "T_year": Requise_data[item_]["solaryear"],  # 发生的年份
#             "incident": Requise_data[item_]["title"],  # 事件
#             "month": "{0}-{1}".format(9, 22)  # 月份和日期
#         }
#         print(data)
#         data = urllib.parse.urlencode(data).encode("utf-8")
#         write(data)
#
#
#

import Crawler_data.Today_crawler as Today_crawler

if __name__ == "__main__":
    # Run the "today in history" crawler and echo whatever its entry point returns.
    result = Today_crawler.main()
    print(result)

