from database.db_handler import MysqlHander
from common.my_http import MyHttp
import urllib.request
from bs4 import BeautifulSoup
from common.macro_data import MacroData
import glob
import jieba
import re
import threading
import hashlib
import json

'''
Macro-economic data source: National Development and Reform Commission (NDRC, 发改委).
'''

class FagaiweiData:
    """Scraper for macro-economic announcements from the NDRC search API.

    Each result is stored through ``MacroData.add_macro_data`` with source
    label "发改委" (runtime string kept verbatim).
    """

    # Search-API URL template. The embedded site code and key come from the
    # original hard-coded URLs; ``{page}`` and ``{year}`` are filled per request.
    _URL_TEMPLATE = (
        "https://fwfx.ndrc.gov.cn/api/query?qt=&tab=all&page={page}"
        "&pageSize=20&siteCode=bm04000fgk&key=CAB549A94CF659904A7D6B0E8FC8A7E9"
        "&startDateStr={year}-01-01&endDateStr={year}-12-31"
        "&timeOption=2&sort=dateDesc"
    )

    def __init__(self):
        self.macro = MacroData()

    def _build_url(self, year, page):
        """Return the search-API URL for one result page of one calendar year."""
        return self._URL_TEMPLATE.format(page=page, year=year)

    def history(self):
        """Backfill: walk every result page for the years 2021-2022.

        BUGFIX: the original rebuilt the URL *before* incrementing ``page``,
        so every iteration re-fetched page 1 of the year while telling
        ``realtime`` it was on a later page. The URL is now rebuilt with the
        already-incremented page number on each pass.
        """
        for year in range(2021, 2023):
            page = 1
            while self.realtime(self._build_url(year, page), page):
                page += 1

    def realtime(self, url, page):
        """Fetch one result page and store every announcement on it.

        :param url: fully-built search-API URL for the page to fetch.
        :param page: 1-based page number, used only to decide whether more
            pages remain (page size is fixed at 20 by the URL).
        :return: ``True`` if another page should be fetched, ``False`` when
            there are no hits or this page was the last one.
        """
        json_data = json.loads(MyHttp.http_json_data(url))
        cnt = json_data["data"]["totalHits"]
        if cnt == 0:
            return False

        for d in json_data["data"]["resultList"]:
            # 100 is the category/weight code the project uses for this feed
            # — presumably a source-type id; confirm against MacroData usage.
            self.macro.add_macro_data(
                d["docDate"], 100, d["title"], d["url"], "发改委"
            )
        # 20 items per page: once page*20 reaches the total, we are done.
        if cnt <= (page * 20):
            return False
        return True

    def get_data(self):
        """Fetch the first page of the current target year (2022).

        TODO(review): the year is hard-coded; consider deriving it from the
        current date if this is meant to stay "realtime".
        """
        #self.history()
        self.realtime(self._build_url(2022, 1), 1)

def fagaiwei_api():
    """Module entry point: fetch and store the latest NDRC announcements."""
    FagaiweiData().get_data()

if __name__ == '__main__':
    # Allow running this scraper directly as a standalone script.
    fagaiwei_api()