#!/usr/bin/python
# coding=utf-8
from flask import Flask
import io
import time
import random
import sys
from bs4 import BeautifulSoup
import urllib
import requests
from flask import request
import threading

# Python 2 interpreter-wide default-encoding hack: force utf-8 so implicit
# str<->unicode coercions (template text, Baidu HTML) don't raise
# UnicodeDecodeError.  reload(sys) restores setdefaultencoding, which the
# site module deletes at startup.  On Python 3 getdefaultencoding() is
# already 'utf-8', so this Py2-only branch is skipped entirely.
if sys.getdefaultencoding() != 'utf-8':
    reload(sys)
    sys.setdefaultencoding('utf-8')

# --------- config -----------
# Whitelist of domains each query fans out to (one Baidu "site:" search
# per entry).  Mutated at runtime by the /addsite and /delsite routes.
favourite_sites = ['zhihu.com', 'jianshu.com', 'cnblogs.com', 'juejin.im', 'ibm.com', 'github.com', 'baidu.com']
#favourite_sites = ['baidu.com']  # minimal single-site list, handy for testing
template_search_page = './baidu.html'  # local HTML template served at / — must exist next to this script
baseurl_baidu = "http://www.baidu.com/s?wd="  # search-URL prefix; query word is appended
# ----------------------------

# --------- resource ---------
# Single module-level Flask application; every route below hangs off it.
app = Flask(__name__)


# index.html
@app.route('/')
def helloworld():
    """Serve the local search-page template at the site root."""
    # Context manager closes the handle promptly; the original
    # open(...).read() leaked the file object until GC.
    with open(template_search_page) as f:
        return f.read()

@app.route('/content-search.xml')
def content_search():
    """Serve the search template for /content-search.xml.

    NOTE(review): the path suggests an OpenSearch descriptor, but it
    currently just re-serves the same HTML page — confirm intent.
    """
    # with-block fixes the leaked file handle of the original.
    with open(template_search_page) as f:
        return f.read()

# result.html
@app.route('/s',methods = ['GET'])
def search_with_baidu():
    """Handle /s?wd=...: run the federated search and return its HTML.

    Times the whole request and logs the duration.  On failure the
    error text is returned to the client instead of a 500 page
    (best-effort behavior kept from the original).
    """
    start_time = time.time()
    word = request.args.get('wd')
    if word is None:
        # Original crashed here: urllib.unquote(None) raises AttributeError.
        return 'no content'
    # request.args already URL-decodes once; unquote again in case the
    # client double-encoded the term (original behavior, kept).
    word = urllib.unquote(word)
    try:
        result = search(word)
    except Exception as e:
        # str(e) instead of e.message: .message is deprecated since 2.6
        # and removed in Python 3.
        print('search failed: ' + str(e))
        return str(e)
    spent_time = time.time() - start_time
    print('spent-time: ' + str(spent_time))
    return result


# ---------- modify config --------
@app.route('/help')
def helloworld_help():
    """Render the plain-text usage summary for the /help endpoint."""
    usage_text = helps
    return response(usage_text)


@app.route('/addsite')
def helloworld_site():
    """Add ?site=domain to the favourite-sites whitelist and echo it.

    Guard against a missing/empty ``site`` parameter: the original
    appended ``None`` to favourite_sites when /addsite was hit without
    the query argument, poisoning every later search.
    """
    site = request.args.get('site')
    if site and site not in favourite_sites:
        favourite_sites.append(site)
    return response(favourite_sites)


@app.route('/delsite')
def helloworld_site_del():
    """Remove ?site=domain from the whitelist (no-op if absent)."""
    target = request.args.get('site')
    try:
        favourite_sites.remove(target)
    except ValueError:
        # Site wasn't in the list — same silent no-op as the original
        # membership check.
        pass
    return response(favourite_sites)


@app.route('/listsite')
def helloworld_site_list():
    """Report the current favourite-sites whitelist."""
    current_sites = favourite_sites
    return response(current_sites)


# -----------------------------


def search(word):
    """Run one site-restricted Baidu query per favourite site, in
    parallel, and splice all results into a single Baidu results page.

    Returns the merged page as an HTML string.
    """
    result_soups = []
    workers = []
    for url in generate_search_tasks(word):
        t = threading.Thread(target=handle_url, args=(url, result_soups))
        t.start()
        workers.append(t)
    # Join only after *every* worker has started.  The original called
    # start() and join() in the same loop iteration, which ran the HTTP
    # fetches strictly one after another and made the threads pointless.
    for t in workers:
        t.join()
    base_soup = create_result_base_soup(word)
    page_soup = put_result_into_page(base_soup, result_soups)
    return str(page_soup)


def put_result_into_page(base_soup, result_soups):
    """Inject the collected result nodes into the page's result
    container and return the (mutated) page soup.

    The nodes are shuffled first so hits from different sites are
    interleaved instead of grouped per site.
    """
    container = base_soup.select('#content_left')[0]
    random.shuffle(result_soups)  # in-place
    for node in result_soups:
        container.append(node)
    return base_soup


def create_result_base_soup(word):
    """Fetch the plain (unrestricted) Baidu results page for *word* and
    parse it into a soup.

    The returned soup supplies the surrounding page chrome — header,
    search box and the ``#content_left`` container — that the per-site
    results are later injected into.  (Dead commented-out code that
    stripped ``div.c-container`` nodes has been removed.)
    """
    page_html = requests.get(baseurl_baidu + word).text
    return BeautifulSoup(page_html, "html.parser")


def handle_url(url, result_soups):
    """Fetch one search URL and collect its result blocks.

    Runs on a worker thread started by search(); appends every
    ``div.result`` node of the fetched page to the shared
    *result_soups* list (list.extend is a single bytecode-level call,
    so concurrent workers don't interleave within one extend).
    """
    # print() form emits the same text on Python 2 and 3.
    print('url: ' + url)
    page = BeautifulSoup(requests.get(url).text, "html.parser")
    result_soups.extend(page.select('div.result'))


def generate_search_tasks(word, sites=None, base=None):
    """Build one search URL per site, restricting each query with
    Baidu's ``site:`` operator (e.g. ``...s?wd=word site:github.com``).

    word  -- the user's query string (already URL-decoded).
    sites -- iterable of domains; defaults to the module-level
             favourite_sites whitelist (backward compatible).
    base  -- search-URL prefix; defaults to baseurl_baidu.

    NOTE(review): the query is not percent-encoded here; requests
    re-quotes the URL when it is fetched — confirm spaces survive.
    """
    if sites is None:
        sites = favourite_sites
    if base is None:
        base = baseurl_baidu
    return [base + word + ' site:' + site for site in sites]


def response(obj):
    """Render *obj* as an HTML-ish response string.

    Lists are rendered element-by-element and concatenated; strings get
    their newlines turned into ``<br>``; anything else is str()-ed.

    Fixes two bugs in the original: ``type(obj) == 'list'`` compared a
    type object to the *string* ``'list'`` (always False, so the list
    branch was unreachable), and that branch had no ``return`` so it
    would have yielded None anyway.
    """
    if isinstance(obj, list):
        return ''.join(response(item) for item in obj)
    # type(u'') covers unicode on Python 2 and is just str on Python 3.
    if isinstance(obj, (str, type(u''))):
        return obj.replace('\n', '<br>')
    return str(obj)


# Plain-text usage summary served by the /help endpoint.
helps = '''/s?wd=xxx
/addsite?site=xxx
/delsite?site=xxx
/listsite

version:1.0
author:wangguize
'''

if __name__ == '__main__':
    # Development entry point: Flask's builtin server on 127.0.0.1:5000.
    # (Dead commented-out test calls removed.)
    app.run()
