from flask import Flask
# NOTE: the flask.ext.* namespace was removed in Flask 1.0 — import the
# extension package directly (matches the existing flask_restful import).
from flask_restful import Api, Resource, reqparse

import config
from spider import GsxtSpider

# Application and REST API wiring.
app = Flask(__name__)
api = Api(app)

# Query-string parser shared by the /crawl endpoint:
#   name       - query name to search for; a missing argument is rejected
#                by reqparse itself with HTTP 400 (required=True)
#   use_chrome - integer flag forwarded to the spider; presumably selects
#                a Chrome-backed crawl (TODO confirm against GsxtSpider)
get_parser = reqparse.RequestParser()
get_parser.add_argument('name', required=True, type=str)
get_parser.add_argument('use_chrome', required=False, type=int, default=1)


class Crawl(Resource):
    """Crawl endpoint: ``GET /crawl?name=<query>&use_chrome=<int>``.

    Runs :class:`GsxtSpider` for the supplied name and returns the scraped
    records in a JSON envelope: ``{'code', 'msg', 'data', 'count'}``.
    """

    def get(self):
        """Handle a crawl request; always returns a JSON-serializable dict."""
        # reqparse aborts with HTTP 400 before reaching this point when
        # `name` is absent (required=True); an explicitly-empty string still
        # gets through, so it is validated below.
        args = get_parser.parse_args()
        name = args.get('name')
        use_chrome = args.get('use_chrome')

        error = self._validate_name(name)
        if error is not None:
            return error

        # The spider performs network I/O (and possibly drives a browser
        # when use_chrome is truthy — TODO confirm against GsxtSpider).
        # Shield API clients from unexpected failures so they receive the
        # endpoint's JSON envelope rather than Flask's default 500 page.
        try:
            spider = GsxtSpider(name, use_chrome)
            spider.run()
            data = spider.data
        except Exception as exc:  # boundary handler: report, don't leak
            return {
                'code': 500,
                'msg': '抓取失败: {}'.format(exc)
            }

        return {
            'code': 0,
            'msg': '请求成功',
            'data': data,
            'count': len(data)
        }

    @staticmethod
    def _validate_name(name):
        """Return an error payload dict when *name* is invalid, else None."""
        if not name:
            return {
                'code': 400,
                'msg': '参数name不能为空'
            }
        if len(name) < 2:
            return {
                'code': 400,
                'msg': '请输入正确查询名称'
            }
        return None


# Expose the crawler resource under GET /crawl.
api.add_resource(Crawl, '/crawl')

if __name__ == '__main__':
    # NOTE(review): debug=True enables the Werkzeug debugger and reloader —
    # confirm this script is not deployed as-is to production.
    app.run(host='0.0.0.0', port=config.PORT, debug=True)
