#coding=utf-8
import sys
sys.path.append('../')
from xapian_index import Xapian_Search
from WebPage.forms import WebsiteCreationForm, KeywordCreationForm, ClassesCreationForm
from django.shortcuts import render_to_response
from django.template.context import RequestContext
from django.http import HttpResponse, HttpResponseRedirect
from WebPage.models import Website, webpage, lda_topics
from accounts.pages_assistant import Page_Assistant
from django.contrib.auth.decorators import login_required
from urllib2 import Request, urlopen, URLError, HTTPError
import jieba
import jieba.analyse
from lda.create_corpus import MyGensim

@login_required
def show_webpage(request, webpage_id):
    """Render one crawled page with extracted keywords and its LDA topic mix."""
    page = webpage.objects.get(pk=webpage_id)
    content = page.text

    # Infer the document's topic distribution from the trained LDA model,
    # most probable topic first.
    gensim_helper = MyGensim()
    lda_model = gensim_helper.get_ldamodel()
    bow_vector = gensim_helper.doc2bow(content)
    recommendations_list = sorted(
        lda_model[bow_vector], key=lambda pair: pair[1], reverse=True)

    # Two independent keyword extractors: TF-IDF top-10 and TextRank.
    hot_words = jieba.analyse.extract_tags(content, topK=10)
    hot_words_textrank = jieba.analyse.textrank(content)

    return render_to_response(
        'show_webpage.html',
        RequestContext(request, {'username': request.user.username,
                                 'webpage': page,
                                 'hot_words': hot_words,
                                 'hot_words_textrank': hot_words_textrank,
                                 'text': content.split('\n'),
                                 'recommendations_list': recommendations_list}))

@login_required
def search_topic(request):
    """Show one page of the web pages matching an LDA topic.

    GET params:
        topic_no -- topic index to search for.
        p        -- 1-based page number of the result listing.
    """
    topic_no = request.GET['topic_no']
    cur_page = int(request.GET['p'])
    webpages = Search_Topic(topic_no)
    topic = lda_topics.objects.get(topic_no=topic_no)

    page_assistant = Page_Assistant(obj=webpages)
    pre_page = page_assistant.get_pre_page_no(cur_page)
    nex_page = page_assistant.get_nex_page_no(cur_page)
    b, e = page_assistant.get_objects_by_pageno(cur_page)
    # BUG FIX: slice the topic search results themselves. The old code
    # refetched webpage.objects.all(), silently discarding the topic filter.
    webpages = webpages[b:e]
    page_nums = page_assistant.get_pages_list(cur_page)

    return render_to_response(
        'search.html',
        RequestContext(request, {'webpages': webpages,
                                 'username': request.user.username,
                                 'page_nums': page_nums,
                                 'topic_no': topic_no,
                                 'pre_page': pre_page,
                                 'cur_page': cur_page,
                                 'nex_page': nex_page,
                                 'topic':topic.words}))

@login_required
def search(request):
    """Show one page of the web pages matching a full-text keyword query.

    GET params:
        word -- search keyword passed to the Xapian index.
        p    -- 1-based page number of the result listing.
    """
    word = request.GET['word']
    cur_page = int(request.GET['p'])
    webpages = Search(word)

    # BUG FIX: always echo the keyword back to the template. It was only
    # bound under a (mistyped, str-vs-int) condition, so page 1 could hit
    # a NameError when building the context below.
    keyword = word

    page_assistant = Page_Assistant(obj=webpages)
    pre_page = page_assistant.get_pre_page_no(cur_page)
    nex_page = page_assistant.get_nex_page_no(cur_page)
    b, e = page_assistant.get_objects_by_pageno(cur_page)
    # BUG FIX: slice the search results themselves. The old code refetched
    # webpage.objects.all(), silently discarding the keyword filter.
    webpages = webpages[b:e]
    page_nums = page_assistant.get_pages_list(cur_page)

    return render_to_response(
        'search.html',
        RequestContext(request, {'webpages': webpages,
                                 'username': request.user.username,
                                 'page_nums': page_nums,
                                 'keyword': keyword,
                                 'pre_page': pre_page,
                                 'cur_page': cur_page,
                                 'nex_page': nex_page}))

def Search(word):
    """Return the webpage records whose link matches a Xapian hit for *word*.

    Each Xapian result is treated as a link; all webpage rows sharing that
    link are collected, preserving the order of the search results.
    """
    xapian_search = Xapian_Search()
    webpages = list()
    for link in xapian_search.search(word):
        # QuerySet.filter never raises DoesNotExist (it just yields nothing),
        # so the old try/except around this loop was dead code.
        webpages.extend(webpage.objects.filter(link=link))
    return webpages

def Search_Topic(topic_no):
    """Return the webpage records whose link matches a Xapian topic search.

    Mirrors Search(), but queries the index by LDA topic number instead of
    by keyword.
    """
    xapian_search = Xapian_Search()
    webpages = list()
    for link in xapian_search.search_topic(topic_no):
        # QuerySet.filter never raises DoesNotExist (it just yields nothing),
        # so the old try/except around this loop was dead code.
        webpages.extend(webpage.objects.filter(link=link))
    return webpages

def fun(webpages, k, page_length=10):
    """Split *webpages* into pages of *page_length* items and return page *k*.

    Returns ``(page_k, page_no)`` where ``page_k`` is the slice for 1-based
    page *k* (empty if out of range) and ``page_no`` is the list of valid
    page numbers — always at least ``[1]``, even for an empty result set,
    matching the original behaviour.
    """
    k = int(k)
    # Slices clamp automatically, so no min() against len() is needed.
    page_k = webpages[page_length * (k - 1):page_length * k]
    # Ceiling division, floored at one page. Explicit ``//`` keeps this
    # correct on Python 3 too, where the old ``/`` became float division
    # and made range(1, page_size) raise TypeError.
    num_pages = max(1, -(-len(webpages) // page_length))
    return page_k, list(range(1, num_pages + 1))
