import gzip
import json
from collections import Counter

import MySQLdb
import nltk


def create_table(connection):
	"""Create the BookReviews table on the given MySQL connection.

	The table stores one row per book review: the reviewer's id, a
	JSON-encoded word-count dictionary, and the review score.
	"""
	cursor = connection.cursor()
	cursor.execute(
		'''CREATE TABLE BookReviews (UserID varchar(20), Dictionary text, Score text)'''
	)
	connection.commit()

def read_reviews_from_file(filename):
	"""Yield (userId, productId, score) tuples from a gzipped review dump.

	The dump is a sequence of ``key: value`` lines; a blank line terminates
	each record.  Raises KeyError if a record lacks one of the three
	expected keys.
	"""
	with gzip.open(filename, 'r') as orfile:
		record = {}
		for raw in orfile:
			# gzip yields bytes on Python 3; the original code assumed str.
			if isinstance(raw, bytes):
				raw = raw.decode('utf-8')
			# Split on the FIRST ':' only, so values that themselves
			# contain ':' are not truncated (the original split on every
			# colon and kept only the first two fields).
			key, _, value = raw.partition(':')
			key = key.strip()
			if key:
				record[key] = value.strip()
			elif record:
				# Blank line ends a record.  The non-empty guard fixes the
				# original's KeyError at EOF (readline() returning '' took
				# the yield branch with an empty dict) and tolerates
				# consecutive blank lines.
				yield (record['review/userId'],
				       record['product/productId'],
				       record['review/score'])
				record = {}
		if record:
			# Flush a trailing record that is not followed by a blank line.
			yield (record['review/userId'],
			       record['product/productId'],
			       record['review/score'])

def read_books_from_file(filename):
	"""Read one book/product id per line from a gzipped file.

	Returns the ids as a list of stripped strings.  Blank lines are
	skipped, which replaces the original's fragile trick of appending the
	EOF sentinel and then ``pop()``-ing it off the end.
	"""
	books = []
	with gzip.open(filename, 'r') as orfile:
		for raw in orfile:
			# gzip yields bytes on Python 3; the original assumed str.
			if isinstance(raw, bytes):
				raw = raw.decode('utf-8')
			entry = raw.strip()
			if entry:
				books.append(entry)
			# Progress feedback; the dump is known to hold ~929,264 ids.
			print("%d out of 929,264 read" % len(books))
	return books

def get_description_for_product(reader, productID):
	print "Getting description for book: ", productID
	reader.execute("""SELECT Description FROM Descriptions WHERE ProductID = %s""", (productID,))
	description = reader.fetchone()[0]
	return description

def get_lemmas_of_text(text):
	"""Tokenize *text* and return lowercase WordNet lemmas of its words.

	English stopwords and common punctuation tokens are dropped; each
	kept token is stripped of surrounding punctuation before lemmatizing.
	"""
	print("Lemmatizing description")
	wnl = nltk.WordNetLemmatizer()
	# Build the skip-set ONCE.  The original rebuilt the full English
	# stopword list (plus punctuation) and scanned it linearly for every
	# single token, which dominates the runtime on long descriptions.
	skip = set(nltk.corpus.stopwords.words('english')) | {',', '.', ':', '!'}
	result = []
	for sentence in nltk.sent_tokenize(text):
		for word in nltk.word_tokenize(sentence):
			lowered = word.lower()
			if lowered not in skip:
				result.append(wnl.lemmatize(lowered.strip(".-!;'")))
	return result

if __name__ == "__main__":
	# Load the set of known book product ids for O(1) membership tests.
	Books = set(read_books_from_file("books.txt.gz"))
	print("Loaded books into memory")
	conn = MySQLdb.connect(host="mysql.stalidis.com", user="ict4growth", passwd="stanford", db="ict4growth")
	print("Connected to database")
	conn.autocommit(False)
	reader = conn.cursor()
	# Dedicated write cursor: the original referenced an undefined name
	# `writer` (the one in create_table is function-local), so the first
	# flush raised NameError.
	writer = conn.cursor()
	# Pending rows awaiting a batched insert; the original never
	# initialized Buffer before using it.
	Buffer = []
	iterator = 0
	for user, product, score in read_reviews_from_file("BookReviews.txt.gz"):
		iterator += 1
		print("Read review %d" % iterator)
		if product in Books:
			print("Review is about a book")
			description = get_description_for_product(reader, product)
			# Bag-of-words of the lemmatized description, stored as JSON.
			dictionary = Counter(get_lemmas_of_text(description))
			Buffer.append((user, json.dumps(dictionary), score))
		if len(Buffer) > 99:
			# "INSERT TO" is not valid MySQL syntax; it must be INSERT INTO.
			writer.executemany("""INSERT INTO BookReviews VALUES (%s,%s,%s)""", Buffer)
			print("Written %d reviews" % len(Buffer))
			conn.commit()
			Buffer = []
	# Flush the final partial batch: the original silently dropped up to
	# 99 buffered rows at end of input.
	if Buffer:
		writer.executemany("""INSERT INTO BookReviews VALUES (%s,%s,%s)""", Buffer)
		print("Written %d reviews" % len(Buffer))
	conn.commit()
	conn.close()



