# Standard library
import json
import urllib
import urllib.request  # required by invoke(): `import urllib` alone does not load the submodule
from urllib import parse

# Third-party
import bs4
import requests
from flask import Flask, jsonify, request

# Project-local
import urlManager, html_downloader, html_parser
# Module-level Flask application instance shared by all route decorators below.
app = Flask(__name__)

class spider_main(object):
    """Minimal crawler driver wiring together the project's URL manager,
    HTML downloader, and HTML parser components."""

    def __init__(self):
        # Collaborators come from project-local modules; their exact
        # contracts are defined there (queue/dedup, fetch, parse).
        self.urls = urlManager.urlManager()
        self.downloader = html_downloader.htmlDownloader()
        self.parser = html_parser.htmlParser()

    def craw(self, root_url):
        """Crawl starting from *root_url* and return the parse result of the
        LAST page successfully processed (empty string if none succeeded).

        Pages that raise during download/parse are skipped (best-effort
        crawl), but the cause is now printed instead of the original
        uninformative bare ``print("exception")``.
        """
        result = ""
        self.urls.add_new_url(root_url)
        while self.urls.has_new_url():
            try:
                url = self.urls.get_new_url()
                html_cont = self.downloader.download(url)
                # Each success overwrites result — only the last parse survives.
                result = self.parser.parse(url, html_cont)
            except Exception as e:
                # Best-effort: keep crawling, but surface what went wrong.
                print("exception while crawling:", e)
        return result

# API endpoint: crawl Baidu Baike for the given school name.
@app.route('/api/v1.0/getData', methods=['GET', 'POST'])
def get_tasks():
    """Crawl ``https://baike.baidu.com/item/<schoolName>`` and return the result.

    Reads ``schoolName`` from the POST form or from the query string/values.
    Responds with HTTP 400 when the parameter is missing or empty — previously
    a missing POST field aborted with an opaque 400 (KeyError on ``form[...]``)
    and a missing GET param crashed with ``parse.quote(None)`` → 500.
    """
    if request.method == 'POST':
        # .get avoids the implicit KeyError/400 abort of form['schoolName'].
        schoolName = request.form.get('schoolName', '')
    else:
        # values.get returns None when absent; normalize to ''.
        schoolName = request.values.get("schoolName") or ''

    if not schoolName:
        return jsonify({'error': 'schoolName parameter is required'}), 400

    # Percent-encode the (typically non-ASCII) name for the URL path.
    schoolName = parse.quote(schoolName)
    objSpider = spider_main()
    result = objSpider.craw("https://baike.baidu.com/item/" + schoolName)
    return result

# Calls the getData endpoint above, passing a sample parameter.
@app.route('/api/v1.0/invoke', methods=['GET', 'POST'])
def invoke():
    """POST a sample ``schoolName`` to the local getData endpoint and relay the body.

    Requires ``urllib.request`` (imported at the top of the file; the original
    ``import urllib`` alone made ``urllib.request.urlopen`` an AttributeError).
    """
    data = bytes(urllib.parse.urlencode({'schoolName': '芝加哥洛约拉大学'}), encoding='utf8')
    # Context manager closes the HTTP response even if read()/loads() raises
    # (the original leaked the connection).
    with urllib.request.urlopen('http://127.0.0.1:5000/api/v1.0/getData', data) as response:
        result = response.read()
    # NOTE(review): assumes the endpoint returns JSON — depends on what the
    # project parser emits; confirm before relying on this print.
    print(json.loads(result))
    return result



if __name__ == '__main__':
    # Start the Flask development server (defaults to 127.0.0.1:5000,
    # the address invoke() targets above).
    app.run()

