#!/usr/bin/python
# Flask micro-service that scrapes a stock table from a caller-supplied URL.
from urllib import request as req  # NOTE(review): imported but unused below — verify before removing
from bs4 import BeautifulSoup as bs
import requests
import json
from flask import Flask,request
import os  # NOTE(review): unused except by commented-out code — verify before removing
# from proxy.Manager.ProxyManager import ProxyManager
app = Flask(__name__)  # WSGI application object; routes are registered below

@app.route('/get', methods=['GET', 'POST'])
def getUrls():
    """Fetch the page at the submitted ``url``, scrape its stock table,
    and return the rows as JSON.

    Reads ``url`` from the form body, falling back to ``request.values``
    (query string / body). Downloads the page — retrying through proxies
    until a 200 response — then parses the first ``<table>`` with class
    ``static_table tbody_table static_tbody_table`` and returns
    ``{"data": [{"id": ..., "secu_code": ..., "secu_name": ...}, ...]}``.

    Returns:
        A JSON string on success, a 400 tuple when ``url`` is missing,
        or the literal string "暂无数据" when the table is absent.
    """
    urls = request.form.get("url")
    if urls is None:
        urls = request.values.get("url")
    if not urls:
        # Robustness fix: the original passed None straight into
        # requests.get(), which raised an unhandled MissingSchema (500).
        return "Missing 'url' parameter", 400

    # First attempt goes direct. BUG FIX: the original built
    # {"https": "https://"} from an empty proxy string — a malformed
    # proxy URL that requests rejects on the very first call.
    proxies = None
    res = requests.get(urls, proxies=proxies, timeout=60)
    # BUG FIX: original used ``is not 200`` — an identity check against an
    # int literal, which is implementation-dependent — instead of ``!=``.
    while res.status_code != 200:
        # NOTE(review): the ProxyManager import is commented out at the top
        # of this file, so this retry path raises NameError until it is
        # restored — confirm the proxy package is available.
        proxy = ProxyManager().get()
        proxies = {"https": "https://{proxy}".format(proxy=proxy)}
        res = requests.get(urls, proxies=proxies, timeout=30)

    # Parse the HTML page.
    soup = bs(res.content, 'html.parser')
    body = soup.body
    result = {
        "data": {}
    }
    table = body.find("table", "static_table tbody_table static_tbody_table")
    if table is None:
        print("无数据")
        return "暂无数据"
    tbody = table.find("tbody")
    if tbody is None:
        # Robustness fix: original dereferenced a possibly-None tbody and
        # crashed with AttributeError; treat it the same as a missing table.
        return "暂无数据"

    # Walk every row, pulling id / code / name out of fixed cell positions.
    trdata = list()
    for row in tbody.find_all("tr"):
        cells = row.find_all("td")
        trdata.append({
            "id": cells[0].find("div").string,
            "secu_code": cells[2].find("div").string,
            "secu_name": cells[3].find("a").string,
        })
    result['data'] = trdata
    # BUG FIX: str(dict) produces a Python repr with single quotes, which is
    # not valid JSON; serialize properly (json is already imported at top).
    return json.dumps(result, ensure_ascii=False)
if __name__ == '__main__':
    # Run the Flask development server on localhost only.
    app.debug = False
    # BUG FIX: the original passed port='9999' as a string; the port must be
    # an int (string ports are rejected by newer Werkzeug/socket layers).
    app.run(host='127.0.0.1', port=9999)