#!/usr/bin/python
from urllib import request as req
from bs4 import BeautifulSoup as bs
import requests
import json
from flask import Flask,request
import os
from proxy.Manager.ProxyManager import ProxyManager
import picDownload
from HangzhouMovie import getMovie
app = Flask(__name__)

@app.route('/get_stock/',  methods=['GET', 'POST'])
def getstock():
    """Fetch a stock-listing page through a rotating proxy and scrape its table.

    Reads the target page URL from the "url" form field (falling back to
    ``request.values`` for query-string parameters), downloads it through an
    HTTPS proxy supplied by ProxyManager, and extracts the rows of the
    listing table.

    Returns:
        UTF-8 encoded JSON bytes: {"data": [{"id", "secu_code", "secu_name"}, ...]},
        or a plain "暂无数据" message when the page/table is unavailable.
    """
    urls = request.form.get("url")
    if urls is None:
        urls = request.values.get("url")
    # Retry with a fresh proxy on non-200 responses, but cap the attempts so
    # an unreachable target cannot loop forever.  The original compared the
    # status with "is not 200" (identity), which only worked by accident of
    # CPython's small-int caching; "==" is the correct comparison.
    res = None
    for _ in range(10):
        proxy = ProxyManager().get()
        proxies = {"https": "https://{proxy}".format(proxy=proxy)}
        res = requests.get(urls, proxies=proxies, timeout=10)
        if res.status_code == 200:
            break
    else:
        return "暂无数据"
    # 解析html界面 (parse the HTML page)
    soup = bs(res.content, 'html.parser')
    body = soup.body
    result = {
        "data": {}
    }
    table = body.find("table", "static_table tbody_table static_tbody_table")
    if table is None:
        print("无数据")
        return "暂无数据"
    # Guard against a table with no <tbody>; the original would have raised
    # AttributeError on find_all(None).
    tbody = table.find("tbody")
    if tbody is None:
        print("无数据")
        return "暂无数据"
    trdata = []
    for row in tbody.find_all("tr"):
        td = row.find_all("td")
        trdata.append({
            "id": td[0].find("div").string,
            "secu_code": td[2].find("div").string,
            "secu_name": td[3].find("a").string,
        })
    result['data'] = trdata
    # json.dumps (json is already imported but was unused) produces valid
    # JSON; str(dict) would emit Python repr with single quotes.
    return json.dumps(result, ensure_ascii=False).encode("utf-8")

@app.route('/test/',  methods=['GET', 'POST'])
def test():
    """Health-check endpoint: always replies with the literal string "succ"."""
    return "succ"


@app.route('/get_video/',  methods=['GET', 'POST'])
def getvideo():
    """Fetch a page through a rotating proxy and scrape its listing table.

    NOTE(review): despite the route name this is currently a verbatim copy of
    /get_stock/ — it scrapes the same table layout; confirm intended behavior.

    Reads the target page URL from the "url" form field (falling back to
    ``request.values``), downloads it through an HTTPS proxy supplied by
    ProxyManager, and extracts the rows of the listing table.

    Returns:
        UTF-8 encoded JSON bytes: {"data": [{"id", "secu_code", "secu_name"}, ...]},
        or a plain "暂无数据" message when the page/table is unavailable.
    """
    urls = request.form.get("url")
    if urls is None:
        urls = request.values.get("url")
    # Retry with a fresh proxy on non-200 responses, with a cap so a dead
    # target cannot spin forever.  "is not 200" (identity) was replaced with
    # the correct "==" comparison.
    res = None
    for _ in range(10):
        proxy = ProxyManager().get()
        proxies = {"https": "https://{proxy}".format(proxy=proxy)}
        res = requests.get(urls, proxies=proxies, timeout=10)
        if res.status_code == 200:
            break
    else:
        return "暂无数据"
    # 解析html界面 (parse the HTML page)
    soup = bs(res.content, 'html.parser')
    body = soup.body
    result = {
        "data": {}
    }
    table = body.find("table", "static_table tbody_table static_tbody_table")
    if table is None:
        print("无数据")
        return "暂无数据"
    # Guard against a table with no <tbody>; the original would have raised
    # AttributeError on find_all(None).
    tbody = table.find("tbody")
    if tbody is None:
        print("无数据")
        return "暂无数据"
    trdata = []
    for row in tbody.find_all("tr"):
        td = row.find_all("td")
        trdata.append({
            "id": td[0].find("div").string,
            "secu_code": td[2].find("div").string,
            "secu_name": td[3].find("a").string,
        })
    result['data'] = trdata
    # json.dumps (json is already imported but was unused) produces valid
    # JSON; str(dict) would emit Python repr with single quotes.
    return json.dumps(result, ensure_ascii=False).encode("utf-8")


@app.route('/get_movie/',  methods=['GET', 'POST'])
def getmovie():
    """Scrape movie listings for the URL supplied in the "url" parameter.

    Delegates to HangzhouMovie.getMovie and returns its result unchanged.
    """
    # Fall back to request.values so a GET with a query-string "url" works,
    # matching the other endpoints in this module (the original only read
    # form data, which is empty on GET requests).
    url = request.form.get("url")
    if url is None:
        url = request.values.get("url")
    return getMovie(url)


@app.route('/get_pic/', methods=['GET', 'POST'])
def getpic():
    """Download the picture at the URL supplied in the "url" parameter.

    Delegates to picDownload.download and returns a plain success marker.
    """
    # The original route accepted only GET yet read request.form, which is
    # empty for GET requests — the handler always received url=None.  Accept
    # POST as well and fall back to request.values for query-string input.
    url = request.form.get("url")
    if url is None:
        url = request.values.get("url")
    picDownload.download(url)
    return "succeed"


if __name__ == '__main__':
    app.debug = False
    # app.run expects an integer port; the original passed the string '9999',
    # which recent Werkzeug versions reject.
    app.run(host='127.0.0.1', port=9999)