import requests
import re
from bs4 import BeautifulSoup
import pandas as pd
import json


def get_earthquake_page_number(url, param):
    """Fetch the first search-result page and return the index of the last page.

    The CEIC endpoint answers with JSONP: ``jQueryXXX_YYY&_=ZZZ({...})``.
    The JSON payload's "page" field holds pagination HTML whose "尾页"
    (last page) link carries the total page count in its ``page`` attribute.

    Args:
        url: Search endpoint URL.
        param: Query-string parameters (must include the jQuery "callback").

    Returns:
        int: The last page number reported by the pagination widget.

    Raises:
        requests.exceptions.Timeout: If the request exceeds 5 seconds.
        ValueError: If the JSONP prefix or the "尾页" link is missing.
    """
    try:
        req_data = requests.get(url, params=param, timeout=5)
        req_data.encoding = req_data.apparent_encoding
        text = req_data.text
    except requests.exceptions.Timeout:
        # requests raises its own Timeout, not the builtin TimeoutError.
        print("TimeoutError")
        raise

    # Strip the JSONP wrapper: "callback(" prefix and the trailing ")".
    head_match = re.match(r"jQuery\d*?_\d*?&_=\d*?\(", text)
    if head_match is None:
        print("not find jQuery")
        raise ValueError("JSONP callback prefix not found in response")

    json_text = text[head_match.end():-1]
    payload = json.loads(json_text)

    soup = BeautifulSoup(payload["page"], features="lxml")
    last_link = soup.find(alt="尾页")
    if last_link is None:
        raise ValueError("pagination link '尾页' not found in response")

    # int() instead of eval(): never evaluate scraped content as code.
    return int(last_link["page"])


def get_earthquake_dict(url, param, number):
    """Download result pages 0..number and return the accumulated records.

    Each page is a JSONP response whose payload contains a "shuju" list of
    earthquake record dicts. Pages that time out or come back malformed are
    skipped with a message rather than aborting the whole download.

    Args:
        url: Search endpoint URL.
        param: Base query parameters; NOT mutated (a copy is taken per page).
        number: Index of the last page to fetch (inclusive).

    Returns:
        list: Concatenation of every fetched page's "shuju" records.
    """
    all_data = []
    jsonp_head = re.compile(r"jQuery\d*?_\d*?&_=\d*?\(")

    for page in range(number + 1):
        # Copy so the caller's dict is not mutated by the "page" key.
        page_param = dict(param)
        page_param["page"] = page

        try:
            req_data = requests.get(url, params=page_param, timeout=5)
            req_data.encoding = req_data.apparent_encoding
            text = req_data.text
        except requests.exceptions.Timeout:
            # requests raises its own Timeout, not the builtin TimeoutError.
            print("TimeoutError")
            continue

        head_match = jsonp_head.match(text)
        if head_match is None:
            print("not find jQuery")
            continue

        # Strip the JSONP wrapper: "callback(" prefix and the trailing ")".
        json_text = text[head_match.end():-1]
        all_data.extend(json.loads(json_text)["shuju"])
        print("page {} has been finished!".format(page))

    return all_data


if __name__ == "__main__":
    # Query the full CEIC catalogue (1900-01-01 .. 2020-04-17) with every
    # filter left blank; the "callback" key mimics the site's jQuery JSONP call.
    search_url = "http://www.ceic.ac.cn/ajax/search"
    base_query = {
        "page": "",
        "start": "1900-01-01",
        "end": "2020-04-17",
        "jingdu1": "",
        "jingdu2": "",
        "weidu1": "",
        "weidu2": "",
        "height1": "",
        "height2": "",
        "zhenji1": "",
        "zhenji2": "",
        "callback": "jQuery18006536563323020064_1587103599427&_=1587103669801",
    }

    # Discover how many pages exist, then pull every page's records.
    last_page = get_earthquake_page_number(search_url, base_query)
    records = get_earthquake_dict(search_url, base_query, last_page)

    # Persist the combined catalogue as both CSV and Excel.
    frame = pd.DataFrame(records)
    frame.to_csv("全球地震数据.csv")
    frame.to_excel("全球地震数据.xlsx")
