from datetime import datetime, date

from django.http import HttpResponse

from dragonforum.forum.models import Forum, Post, Thread, User

from dragonforum.api.models import PageCrawl


# NOTE(review): this query runs at import time -- importing the module
# fails if the DB is unreachable or this forum row does not exist yet.
# Consider resolving the forum lazily inside the views instead.
forum = Forum.objects.get(title='General Wheel of Time Discussion')


def update(request):
    """Store a crawler-submitted record.

    Expects a 'type' parameter -- 'thread' or 'post' -- plus the fields
    for that record type in request.REQUEST.  Always responds 'OK'.
    """
    # Renamed from 'type'/'id' to avoid shadowing the builtins; the
    # stray Python 2 debug 'print' statement was removed.
    record_type = request.REQUEST['type']
    if record_type == 'thread':
        thread_id = request.REQUEST['id']
        title = request.REQUEST['title']
        # Saving with an explicit primary key overwrites any existing
        # row for this thread -- presumably intentional on re-crawl.
        Thread(id=thread_id, forum=forum, title=title).save()
    elif record_type == 'post':
        post_id = int(request.REQUEST['id'])
        thread_id = int(request.REQUEST['thread'])
        username = request.REQUEST['author']
        # Get-or-create the author; the crawler only sends a username.
        try:
            author = User.objects.get(username=username)
        except User.DoesNotExist:
            author = User(username=username)
            author.save()
        # Posts may arrive before their thread page has been crawled;
        # create a placeholder so the foreign key can be satisfied.
        try:
            thread = Thread.objects.get(id=thread_id)
        except Thread.DoesNotExist:
            thread = Thread(id=thread_id, title='Unknown thread', forum=forum)
            thread.save()
        dt = request.REQUEST['datetime']
        # Timestamps like 'at 03:15:42 PM' omit the date; substitute
        # today's.  NOTE(review): str.replace substitutes EVERY 'at'
        # occurrence, so a value that already contains a full date with
        # the substring 'at' would be corrupted -- confirm the crawler
        # never sends such strings.
        dt = dt.replace('at', date.today().strftime('%B %d, %Y,'))
        dt = datetime.strptime(dt, '%B %d, %Y, %I:%M:%S %p')
        text = request.REQUEST['text']
        Post(id=post_id, thread=thread, author=author, body=text, time=dt).save()
    return HttpResponse('OK')


def crawled_page(request):
    """Record that the page at 'url' was crawled just now.

    Updates the existing PageCrawl row when one exists instead of
    inserting a duplicate; duplicates would make is_page_crawled()'s
    .get(url=...) raise MultipleObjectsReturned.  Responds 'OK'.
    """
    url = request.REQUEST['url']
    try:
        crawl = PageCrawl.objects.get(url=url)
        crawl.last_crawl = datetime.now()
        crawl.save()
    except PageCrawl.DoesNotExist:
        PageCrawl(url=url, last_crawl=datetime.now()).save()
    return HttpResponse('OK')


def is_page_crawled(request):
    """Report crawl freshness for 'url'.

    Responds 'no' if the URL was never crawled, 'old' if the last crawl
    was an hour or more ago, and 'yes' otherwise.
    """
    url = request.REQUEST['url']
    try:
        crawl = PageCrawl.objects.get(url=url)
    except PageCrawl.DoesNotExist:
        return HttpResponse('no')
    # Use total_seconds(): timedelta.seconds is only the sub-day
    # component (0-86399), so a crawl several days old could wrongly
    # report 'yes' with the original .seconds comparison.
    if (datetime.now() - crawl.last_crawl).total_seconds() >= 3600:
        return HttpResponse('old')
    return HttpResponse('yes')
