import nltk


class SystemAgent():
    """Skeleton agent for aspect-based sentiment analysis of reviews.

    The tagging helpers return lists of ``(word, token, pos, start, end)``
    tuples, where ``start``/``end`` are character offsets of ``word`` inside
    the original sentence and ``token`` is the form that was actually
    POS-tagged (lowercased for :meth:`pos_tag_lower`).
    """

    def __init__(self):
        pass

    def parse(self, review):
        """Parse *review* into sentiment-tagged aspects (not implemented).

        Planned pipeline:
        """
        # chop review to sentences
        # for every sentence find aspects
        # for every aspect find sentiment
        # aggregate aspects
        # return reduced list of sentiment tagged aspects
        pass

    @staticmethod
    def sent_tokenize(review):
        """Split *review* into a list of sentence strings."""
        # Fix: original had no `self` parameter, so instance calls raised
        # TypeError; @staticmethod keeps SystemAgent.sent_tokenize(...) working
        # and also makes agent.sent_tokenize(...) work.
        return nltk.sent_tokenize(review)

    @staticmethod
    def _tag_and_locate(sentence, lowercase=False):
        """POS-tag *sentence* and attach character offsets to every token.

        When ``lowercase`` is true the tags come from the lowercased
        tokenization, but the offsets always refer to the original text.
        Returns ``[(word, token, pos, start, end), ...]``.
        """
        words = nltk.word_tokenize(sentence)
        tokens = nltk.word_tokenize(sentence.lower()) if lowercase else words
        tags = nltk.pos_tag(tokens)
        positions = []
        counter = 0
        for word in words:
            # `counter` advances by token lengths only (not past the skipped
            # whitespace); the forward search then finds each token's true
            # offset.  This also locates sub-tokens such as "can"/"not"
            # produced from "cannot".
            start = sentence.index(word, counter)
            end = start + len(word)
            counter += len(word)
            positions.append((word, start, end))
        return [(word, token, pos, start, end)
                for ((token, pos), (word, start, end)) in zip(tags, positions)]

    @staticmethod
    def pos_tag_lower(sentence):
        """POS-tag lowercased tokens; offsets refer to the original text."""
        return SystemAgent._tag_and_locate(sentence, lowercase=True)

    @staticmethod
    def pos_tag(sentence):
        """POS-tag with original casing; here ``word`` equals ``token``."""
        return SystemAgent._tag_and_locate(sentence)



"""We had seen all the positive before arrival but cannot emphasise just how amazing this place is... 
Situated a short taxi ride from Chania town, it sports an amazing private cove for swimming and snorkelling in and just a short walk to a gorgeous nearby also. 
Breakfasts were great, the pool was relaxing and staff very very helpful! 
I really enjoyed my stayed here and will hopefully be back next summer :) """

[[('We', 'We', 'PRP', 0, 2), ('had', 'had', 'VBD', 3, 6), ('seen', 'seen', 'VBN', 7, 11), ('all', 'all', 'DT', 12, 15), ('the', 'the', 'DT', 16, 19), ('positive', 'positive', 'JJ', 20, 28), ('before', 'before', 'IN', 29, 35), ('arrival', 'arrival', 'NN', 36, 43), ('but', 'but', 'CC', 44, 47), ('can', 'can', 'MD', 48, 51), ('not', 'not', 'RB', 51, 54), ('emphasise', 'emphasise', 'VB', 55, 64), ('just', 'just', 'RB', 65, 69), ('how', 'how', 'WRB', 70, 73), ('amazing', 'amazing', 'VBG', 74, 81), ('this', 'this', 'DT', 82, 86), ('place', 'place', 'NN', 87, 92), ('is', 'is', 'VBZ', 84, 86), ('...', '...', ':', 95, 98)], [('Situated', 'Situated', 'NNP', 0, 8), ('a', 'a', 'DT', 9, 10), ('short', 'short', 'JJ', 11, 16), ('taxi', 'taxi', 'NN', 17, 21), ('ride', 'ride', 'NN', 22, 26), ('from', 'from', 'IN', 27, 31), ('Chania', 'Chania', 'NNP', 32, 38), ('town', 'town', 'NN', 39, 43), (',', ',', ',', 43, 44), ('it', 'it', 'PRP', 45, 47), ('sports', 'sports', 'VBZ', 48, 54), ('an', 'an', 'DT', 55, 57), ('amazing', 'amazing', 'NN', 58, 65), ('private', 'private', 'JJ', 66, 73), ('cove', 'cove', 'NN', 74, 78), ('for', 'for', 'IN', 79, 82), ('swimming', 'swimming', 'NN', 83, 91), ('and', 'and', 'CC', 92, 95), ('snorkelling', 'snorkelling', 'VBG', 96, 107), ('in', 'in', 'IN', 104, 106), ('and', 'and', 'CC', 92, 95), ('just', 'just', 'RB', 115, 119), ('a', 'a', 'DT', 111, 112), ('short', 'short', 'JJ', 122, 127), ('walk', 'walk', 'NN', 128, 132), ('to', 'to', 'TO', 133, 135), ('a', 'a', 'DT', 111, 112), ('gorgeous', 'gorgeous', 'JJ', 138, 146), ('nearby', 'nearby', 'NN', 147, 153), ('also', 'also', 'RB', 154, 158), ('.', '.', '.', 158, 159)], [('Breakfasts', 'Breakfasts', 'NNS', 0, 10), ('were', 'were', 'VBD', 11, 15), ('great', 'great', 'JJ', 16, 21), (',', ',', ',', 21, 22), ('the', 'the', 'DT', 23, 26), ('pool', 'pool', 'NN', 27, 31), ('was', 'was', 'VBD', 32, 35), ('relaxing', 'relaxing', 'VBG', 36, 44), ('and', 'and', 'CC', 45, 48), ('staff', 'staff', 'NN', 49, 54), ('very', 
'very', 'RB', 55, 59), ('very', 'very', 'RB', 55, 59), ('helpful', 'helpful', 'JJ', 65, 72), ('!', '!', '.', 72, 73)], [('I', 'I', 'PRP', 0, 1), ('really', 'really', 'RB', 2, 8), ('enjoyed', 'enjoyed', 'VBD', 9, 16), ('my', 'my', 'PRP$', 17, 19), ('stayed', 'stayed', 'VBN', 20, 26), ('here', 'here', 'RB', 27, 31), ('and', 'and', 'CC', 32, 35), ('will', 'will', 'MD', 36, 40), ('hopefully', 'hopefully', 'RB', 41, 50), ('be', 'be', 'VB', 51, 53), ('back', 'back', 'RB', 54, 58), ('next', 'next', 'JJ', 59, 63), ('summer', 'summer', 'NN', 64, 70), (':', ':', ':', 71, 72), (')', ')', ':', 72, 73)]]

[[('We', 'we', 'PRP', 0, 2), ('had', 'had', 'VBD', 3, 6), ('seen', 'seen', 'VBN', 7, 11), ('all', 'all', 'DT', 12, 15), ('the', 'the', 'DT', 16, 19), ('positive', 'positive', 'JJ', 20, 28), ('before', 'before', 'IN', 29, 35), ('arrival', 'arrival', 'NN', 36, 43), ('but', 'but', 'CC', 44, 47), ('can', 'can', 'MD', 48, 51), ('not', 'not', 'RB', 51, 54), ('emphasise', 'emphasise', 'VB', 55, 64), ('just', 'just', 'RB', 65, 69), ('how', 'how', 'WRB', 70, 73), ('amazing', 'amazing', 'VBG', 74, 81), ('this', 'this', 'DT', 82, 86), ('place', 'place', 'NN', 87, 92), ('is', 'is', 'VBZ', 84, 86), ('...', '...', ':', 95, 98)], [('Situated', 'situated', 'VBD', 0, 8), ('a', 'a', 'DT', 9, 10), ('short', 'short', 'JJ', 11, 16), ('taxi', 'taxi', 'NN', 17, 21), ('ride', 'ride', 'NN', 22, 26), ('from', 'from', 'IN', 27, 31), ('Chania', 'chania', 'NN', 32, 38), ('town', 'town', 'NN', 39, 43), (',', ',', ',', 43, 44), ('it', 'it', 'PRP', 45, 47), ('sports', 'sports', 'VBZ', 48, 54), ('an', 'an', 'DT', 55, 57), ('amazing', 'amazing', 'NN', 58, 65), ('private', 'private', 'JJ', 66, 73), ('cove', 'cove', 'NN', 74, 78), ('for', 'for', 'IN', 79, 82), ('swimming', 'swimming', 'NN', 83, 91), ('and', 'and', 'CC', 92, 95), ('snorkelling', 'snorkelling', 'VBG', 96, 107), ('in', 'in', 'IN', 104, 106), ('and', 'and', 'CC', 92, 95), ('just', 'just', 'RB', 115, 119), ('a', 'a', 'DT', 111, 112), ('short', 'short', 'JJ', 122, 127), ('walk', 'walk', 'NN', 128, 132), ('to', 'to', 'TO', 133, 135), ('a', 'a', 'DT', 111, 112), ('gorgeous', 'gorgeous', 'JJ', 138, 146), ('nearby', 'nearby', 'NN', 147, 153), ('also', 'also', 'RB', 154, 158), ('.', '.', '.', 158, 159)], [('Breakfasts', 'breakfasts', 'NNS', 0, 10), ('were', 'were', 'VBD', 11, 15), ('great', 'great', 'JJ', 16, 21), (',', ',', ',', 21, 22), ('the', 'the', 'DT', 23, 26), ('pool', 'pool', 'NN', 27, 31), ('was', 'was', 'VBD', 32, 35), ('relaxing', 'relaxing', 'VBG', 36, 44), ('and', 'and', 'CC', 45, 48), ('staff', 'staff', 'NN', 49, 54), ('very', 
'very', 'RB', 55, 59), ('very', 'very', 'RB', 55, 59), ('helpful', 'helpful', 'JJ', 65, 72), ('!', '!', '.', 72, 73)], [('I', 'i', 'PRP', 0, 1), ('really', 'really', 'RB', 2, 8), ('enjoyed', 'enjoyed', 'VBD', 9, 16), ('my', 'my', 'PRP$', 17, 19), ('stayed', 'stayed', 'VBN', 20, 26), ('here', 'here', 'RB', 27, 31), ('and', 'and', 'CC', 32, 35), ('will', 'will', 'MD', 36, 40), ('hopefully', 'hopefully', 'RB', 41, 50), ('be', 'be', 'VB', 51, 53), ('back', 'back', 'RB', 54, 58), ('next', 'next', 'JJ', 59, 63), ('summer', 'summer', 'NN', 64, 70), (':', ':', ':', 71, 72), (')', ')', ':', 72, 73)]]

    def pos_tag_mix(sentence):
        tokens = nltk.word_tokenize(sentence.lower())
        words = nltk.word_tokenize(sentence)
        counter = 0
        tags_lower = nltk.pos_tag(tokens)
        tags = nltk.pos_tag(words)
        positions = []
        for word in words:
            sent = sentence[counter:]
            tf = sent.index(word) + counter
            tl = len(word)
            tt = tf + tl
            counter += tl
            positions.append((word, tf, tt))
        #for ((word,pos),(_,start,end)) in zip(tags, positions):
        #	yield (word, pos, start, end)
        return [(word, token, pos1, pos2, start, end) for ((token,pos1),(_,pos2),(word,start,end)) in zip(tags, tags_lower, positions)]



def chunk(tags):
    """Merge adjacent tokens whose POS tags share the same 2-letter prefix.

    ``tags`` is a sequence of tuples shaped like pos_tag_mix output:
    ``tags[i][0]`` is the word, ``tags[i][2]`` the POS tag, and the last
    two fields are the start/end character offsets.

    Yields ``(merged_words, pos, start, end)`` for every adjacent pair
    whose tags agree on the first two characters (e.g. NN/NNS, JJ/JJR).

    Fixes: Python 2 ``xrange`` -> ``range``; off-by-one (``len(tags)-2``)
    that skipped the final adjacent pair.
    """
    for i in range(len(tags) - 1):
        left, right = tags[i], tags[i + 1]
        if left[2][:2] == right[2][:2]:
            yield (left[0] + " " + right[0], left[2], left[-2], right[-1])

# NOTE(review): removed stray fragment " a]" — a paste artifact that made the file a syntax error.

# --- module-level scratch code (runs at import time) -----------------------
# NOTE(review): `traverse` is defined further down this file, so the call
# below raises NameError when the module is executed top to bottom.
# The unpacking assumes pos_tag_mix 6-tuples
# (word, token, pos, pos_lower, start, end); only word and pos are kept.
s = [(word,pos) for (word,_,pos,_,_,_) in pos_tag_mix("Planet Thailand has always been a hit with me , I go there for the sushi, which is great, the thai food is excellent too .")]
# Chunk grammar: NN merges runs of noun tags, JJ merges comma-separated
# adjective pairs, NP is optional determiner/possessive + adjectives + NN,
# RR is an NP followed by an NN chunk.  `loop = 2` re-applies the cascade
# a second time so later rules can consume chunks built in the first pass.
# NOTE(review): the second NN rule (<NNP>+) appears redundant — <NN.*>+
# already matches NNP; confirm intent.
NPChunker = nltk.RegexpParser(	"""	NN:	{<NN.*>+}
                    NN:	{<NNP>+}
                    JJ:	{<JJ><,><JJ>}
                    NP: 	{<DT|PP\$>?<JJ>*<NN>}
                    RR:	{<NP><NN>}
                """, loop = 2)

s = [(word,pos) for (word,_,pos,_,_,_) in pos_tag_mix("Breakfasts were great, the pool was relaxing and staff very very helpful! ")]
result = NPChunker.parse(s)
traverse(result)








# Scratch: re-chunk the first sample sentence (overwrites `result` above).
result = NPChunker.parse([(word,pos) for (word,_,pos,_,_,_) in pos_tag_mix("Planet Thailand has always been a hit with me , I go there for the sushi, which is great, the thai food is excellent too .")])




def traverse(tree):
    """Print every 'NP' and 'RR' chunk subtree of *tree*.

    NOTE(review): the original tested for 'RN', a label the RegexpParser
    grammar in this file never produces; 'RR' (its {<NP><NN>} rule) is
    assumed to be the intended label — confirm.
    NOTE(review): `subtree.node` is the NLTK 2 API; NLTK 3 renamed it to
    `subtree.label()` — confirm which version this targets.
    NOTE(review): this definition is shadowed by a second `traverse`
    defined later in the file.
    """
    for subtree in tree.subtrees():
        if subtree.node == 'NP':
            print(subtree)
        if subtree.node == 'RR':
            print(subtree)




def traverse(t):
    """Recursively print every 'NP' subtree of an NLTK parse tree.

    Leaves (plain tokens) have no `.node` attribute and terminate the
    recursion.

    NOTE(review): `print t` was Python 2 statement syntax (a SyntaxError on
    Python 3) — changed to the call form, valid on both.
    NOTE(review): `t.node` is the NLTK 2 API (`t.label()` in NLTK 3) —
    confirm the targeted version.
    """
    try:
        t.node
    except AttributeError:
        # Reached a leaf token — nothing to recurse into.
        return
    else:
        if t.node == 'NP':
            print(t)  # or do something else
        else:
            for child in t:
                traverse(child)

# Scratch: chunk-parse another sample sentence; the return value is discarded.
NPChunker.parse([(word,pos) for (word,_,pos,_,_,_) in pos_tag_mix("How much wood, would a woodchuck chuck, if a woodchuck could chuck wood?")])









