# Create your views here.
from django.http import HttpResponse
from django.http import HttpResponseRedirect
from django.shortcuts import render_to_response
from bloganalisys.gui.forms import * #import all forms from this aplication
from urllib import urlopen
from bloganalisys.crawler import *
from bloganalisys.crawler.BlogParsers import *
from couchdb import Server
from couchdb.client import ResourceNotFound
import time
import cdb
# Shared CouchDB connection used by every view in this module.
SERVER = Server('http://127.0.0.1:5984')

# Ensure the databases this module relies on exist.  Both checks now use a
# membership test: the original only created 'docs' when the server held no
# databases at all, so 'docs' could be missing on a non-empty server.
# NOTE(review): acceptable for testing; production code should handle
# connection failures here.
if 'docs' not in SERVER:
    SERVER.create('docs')

if 'temp' not in SERVER:
    SERVER.create('temp')

class URLError(Exception):
    """Raised when the crawler cannot reach the requested address."""
def welcome_view(request):
    """Render the welcome page.

    Also prints the configured CouchDB location to stdout -- a leftover
    debug aid; consider replacing it with proper logging.
    """
    # Single-argument print() behaves identically under Python 2 and 3.
    print(cdb.COUCH_DB)
    return render_to_response('welcome.html')

def analisys_view(request):
    """Render the static analysis page."""
    template_name = 'analisys.html'
    return render_to_response(template_name)
def browser_view(request):
    """Render the static browser page."""
    template_name = 'browser.html'
    return render_to_response(template_name)


def instructions(request, id=None):
    """Render the instructions page for the given instruction type.

    ``id`` (kept for URLconf compatibility, despite shadowing the
    builtin) is passed through to the template as 'type'.
    """
    context = {'type': id}
    return render_to_response('instructions.html', context)

def crawler_view(request):
    """Render the crawler page with a fresh, unbound crawler form."""
    context = {
        'info': 'informacja od crawlera',
        'form': CrawlerForm(),
    }
    return render_to_response('crawler.html', context)

	
def crawler_check(request):
    """Handle a submitted crawler form and dispatch the requested crawl.

    Actions (form field 'action'):
      1 -- fetch the blog content at the submitted address,
      2 -- collect addresses of other blogs,
      3 -- collect the comment list for the blog.
    On success the gathered data is handed to crawler_view_info, which
    stores it and redirects to the result page.  On an invalid form (or a
    plain GET) the crawler page is re-rendered with an error message.

    Bug fixes vs. the original: `form` was undefined on non-POST requests
    (UnboundLocalError at render time), and dead code after the
    `return crawler_view_info(...)` has been removed.
    """
    if request.method == 'POST':
        form = CrawlerForm(request.POST)
        if form.is_valid():
            address = request.POST['address']
            o = Onet(address)
            o.start()
            # Renamed from `type` to avoid shadowing the builtin.
            action = int(request.POST['action'])

            if action == 1:
                information = (address, o.get_blog_by_address())
            elif action == 2:
                information = o.get_other_blogs_address()
            elif action == 3:
                information = (address, o.get_comment_list())
            return crawler_view_info(information, action)
        # NOTE(review): the bound form (with its field errors) is
        # discarded here, so validation errors are never shown to the
        # user; behavior kept for backward compatibility.
        form = CrawlerForm()
    else:
        form = CrawlerForm()

    return render_to_response('crawler.html', {
        "info": "nie wszystkie pola zostaly wypelnione poporawnie",
        'form': form,
        'invalidData': True,
    })


def crawler_note(request, id=None):
    """Render a stored blog note, re-crawling its current content.

    Looks up the note document *id* in the 'temp' database, then fetches
    the blog content from the stored address and renders both together.

    Fix vs. the original: the document is fetched from CouchDB once
    instead of once per field (each `db[id]` access is a remote GET).
    """
    print(id)  # debug aid

    record = SERVER['temp'][id]
    address = record['address']
    author = record['author']
    topic = record['topic']

    # Re-crawl the blog to obtain the current content.
    o = Onet(address)
    o.start()
    content = o.get_blog_by_address()

    context = {
        'topic': topic,
        'address': address,
        'content': content,
        'type': "blog_content2",
        'author': author,
    }
    return render_to_response("crawler_info.html", context)



def crawler_comment2(id):
    """Render a single stored comment document.

    Unlike the other views this takes a document id rather than a
    request -- it is called internally from crawler_comment.
    """
    print(id)  # debug aid

    # Load the comment document from the temporary database.
    doc = SERVER['temp'][id]

    context = {
        'id_': id,
        'address': doc['address'],
        'add_time': doc['add_time'],
        'date_': doc['date'],
        'content': doc['comment'],
        'author': doc['author'],
        'type': 'comment2',
    }
    return render_to_response("crawler_info.html", context)


def crawler_view_info(information, type):
    """Persist crawled data in the 'temp' database and redirect to it.

    ``type`` selects the payload layout (name kept for backward
    compatibility although it shadows the builtin):
      1 -- ``information`` is ``(address, content)``; stores one
           'blog_content' document.
      2 -- ``information`` is a list of ``(address, topic, author, date)``
           tuples; stores one 'blog_address' document each, plus a
           'blog_list' index document referencing them.
      3 -- ``information`` is ``(address, comment_pages)`` where each page
           is a list of ``(c_id, topic)`` pairs; stores one 'blog_comment'
           stub each, plus a 'comment_list' index document.

    Returns an HttpResponseRedirect to the page for the last-created
    document (the index document for types 2 and 3).

    Raises ValueError for an unknown ``type`` (the original raised
    NameError on an undefined local instead).
    """
    print(information)  # debug aid
    db = SERVER['temp']

    if type == 1:
        address, content = information
        doc_id = db.create({
            'add_time': str(time.localtime()),
            'type': 'blog_content',
            'content': content,
            'address': address,
        })
    elif type == 2:
        id_list = []
        print(len(information))
        for address, topic, author, date in information:
            id_list.append(db.create({
                'add_time': str(time.localtime()),
                'type': 'blog_address',
                'address': address,
                'topic': topic,
                'author': author,
                'date': date,
            }))
        doc_id = db.create({
            'add_time': str(time.localtime()),
            'type': 'blog_list',
            'ids': id_list,
        })
    elif type == 3:
        id_list = []
        print(len(information))
        address, comment_pages = information
        for page in comment_pages:
            for c_id, c_topic in page:
                doc = {
                    'add_time': str(time.localtime()),
                    'type': 'blog_comment',
                    'address': address,
                    'topic': c_topic,
                    'c_id': c_id,
                    'comment': "",
                    'author': "",
                    'date': "",
                }
                print(doc)  # debug aid
                id_list.append(db.create(doc))
        # BUG FIX: the index document used to be created inside the outer
        # loop, producing one partial 'comment_list' per comment page.
        # Create it exactly once, after all comments are stored.
        doc_id = db.create({
            'add_time': str(time.localtime()),
            'type': 'comment_list',
            'ids': id_list,
        })
    else:
        raise ValueError("unknown crawl type: %r" % (type,))

    return HttpResponseRedirect("/crawler/db/" + str(doc_id))
# example 'temp' document id (for manual testing): e047f5fbb5376b116fc4e2b2cf9e94ae

def crawler_comment(request, id=None):
    """Crawl the full text of a single stored comment and display it.

    Looks up the comment stub *id* in the 'temp' database, fetches the
    actual comment text from the blog, writes comment body, author, date
    and a fresh timestamp back into the stored document, then delegates
    rendering to crawler_comment2.

    Cleanup vs. the original: removed two unused ``info`` assignments,
    a commented-out return and an unreachable ``pass`` after the return.
    """
    db = SERVER['temp']
    temp = db[id]

    address = temp['address']
    topic = temp['topic']
    c_id = temp['c_id']

    o = Onet(address)
    o.start()
    comment, author, date = o.get_one_comment((c_id, topic))

    # Persist the fetched data back into the stub document.
    temp['comment'] = comment
    temp['author'] = author
    temp['date'] = date
    temp['add_time'] = str(time.localtime())
    db[id] = temp

    return crawler_comment2(id)


def crawler_info(request, id=None):
    """Render the document *id* from the 'temp' database.

    The rendered rows depend on the stored document's 'type':
      'blog_list'    -- each referenced document becomes
                        [id, address, topic, date],
      'blog_content' -- a single [id, content, address] row,
      'comment_list' -- each referenced document becomes
                        [id, c_id, address, topic].
    Referenced documents missing an expected field are skipped.

    Cleanup vs. the original: the local named ``list`` no longer shadows
    the builtin, and the debug-message typo 'blod' is fixed to 'blad'.
    """
    db = SERVER['temp']
    temp = db[id]
    doc_type = temp['type']
    info_list = []

    if doc_type == 'blog_list':
        for ref in temp['ids']:
            try:
                record = db[ref]
                # Guard against malformed / incomplete documents.
                info_list.append(
                    [ref, record['address'], record['topic'], record['date']])
            except KeyError:
                pass

    if doc_type == 'blog_content':
        info_list.append([id, temp['content'], temp['address']])

    if doc_type == 'comment_list':
        for ref in temp['ids']:
            try:
                record = db[ref]
                # Guard against malformed / incomplete documents.
                info_list.append(
                    [ref, record['c_id'], record['address'], record['topic']])
            except KeyError:
                print('blad')  # debug aid; typo fixed ('blod' -> 'blad')

    print(len(info_list))  # debug aid
    return render_to_response('crawler_info.html',
                              {"info": info_list, 'type': doc_type})

def crawler_view_blog(request, numer=0,
                      config_path=r"C:\django\System\io-bloganalisys\bloganalisys\crawler\konfiguracja.xml"):
    """Run the demo crawler and render its output.

    ``config_path`` was a hard-coded absolute Windows path; it is now a
    parameter (defaulting to the original value, so existing callers are
    unaffected) so other deployments can supply their own configuration
    file.  A URLError from the crawl is rendered as a connection-error
    message instead of propagating.
    """
    c = crawler.Crawler(config_path)
    try:
        x = c.demo()
    except URLError:
        x = "Blad polaczenia"
    return render_to_response(
        'crawler_view.html',
        {"info": 'informacja od crawlera: ' + str(numer) + str(x)})
	
