import se
import os
import sys

class secmd:
  """Enumeration of the command codes understood by this driver script.

  Each attribute is an int; parse_cmd() maps a CLI argument string to one
  of these values and the __main__ block dispatches on them.
  """
  # Sequential codes 0..8, in the same order as the original constants.
  (kNone, kCleanIdx, kUpdateIdx, kSearchTest, kSearch,
   kQuery, kPageRank, kPageRankTest, kSearchNetTest) = range(9)

def parse_cmd(argv):
  """Map the first command-line argument to a secmd command code.

  Args:
    argv: the full argument vector (argv[0] is the program name).

  Returns:
    The matching secmd code; secmd.kUpdateIdx when no argument is given,
    secmd.kNone when the argument is unrecognized.
  """
  # Dispatch table replaces the original if-chain.
  commands = {
      'ui': secmd.kUpdateIdx,
      'ci': secmd.kCleanIdx,
      'st': secmd.kSearchTest,
      's': secmd.kSearch,
      'q': secmd.kQuery,
      'pt': secmd.kPageRankTest,
      'pr': secmd.kPageRank,
      'snt': secmd.kSearchNetTest,
  }
  if len(argv) < 2:
    return secmd.kUpdateIdx
  # Bug fix: the original returned bare `kNone`, a NameError at runtime;
  # the constant lives on the secmd class.
  return commands.get(argv[1], secmd.kNone)

if __name__ == '__main__':
  # Seed URL(s) for the crawler and the name of the index database file.
  pagelist = [
      'http://kiwitobes.com/wiki/Categorical_list_of_programming_languages.html']

  dbname = 'searchindex.db'

  cmd = parse_cmd(sys.argv)

  if cmd == secmd.kSearchNetTest:
    # Click-training neural-network smoke test; `sn` is only needed here,
    # so it is imported lazily.
    import sn
    nn = sn.searchnet('nn.db')
    nn.maketables()

    # Synthetic word ids and url ids for the toy example.
    wWorld, wRiver, wBank = 101, 102, 103
    uWorldBank, uRiver, uEarth = 201, 202, 203

    allurls = [uWorldBank, uRiver, uEarth]

    nn.generatehiddennode([wWorld, wBank], allurls)

    # Bug fix: `print c` is Python 2 statement syntax and a SyntaxError
    # under Python 3; the rest of the file already uses print() calls.
    for c in nn.db.queryall('wordhidden', '*'): print(c)
    print('-------------')
    for c in nn.db.queryall('hiddenurl', '*'): print(c)
    print('-------------')

    # Result before any training.
    print(nn.getresult([wWorld, wBank], allurls))

    # One training step, then the same query again.
    nn.train([wWorld, wBank], allurls, uWorldBank)
    print(nn.getresult([wWorld, wBank], allurls))

    # Train repeatedly on three query/target pairs.
    for i in range(30):
      nn.train([wWorld, wBank], allurls, uWorldBank)
      nn.train([wRiver, wBank], allurls, uRiver)
      nn.train([wWorld], allurls, uEarth)

    print(nn.getresult([wWorld, wBank], allurls))
    print(nn.getresult([wRiver, wBank], allurls))
    print(nn.getresult([wBank], allurls))
    print(nn.getresult([wWorld], allurls))

  if cmd == secmd.kCleanIdx:
    # Remove any existing index database before rebuilding. Bug fix:
    # the bare `except:` swallowed every exception (even KeyboardInterrupt);
    # only file-system errors such as "file not found" should be ignored.
    try:
      os.remove(dbname)
    except OSError:
      pass

  crawler = se.crawler(dbname)
  if cmd == secmd.kCleanIdx: crawler.createindextables()
  if cmd == secmd.kUpdateIdx: crawler.crawl(pagelist)
  if cmd == secmd.kSearchTest:
    print([row for row in crawler.db.query('wordlocation', 'rowid', 'wordid=1')])

  if cmd == secmd.kPageRank: crawler.calculatepagerank()
  if cmd == secmd.kPageRankTest:
    cur = crawler.db.queryall('pagerank', '*', 'order by score desc')
    pr = []
    # Print the top 3 pages by PageRank score. Bug fix: cursor.next()
    # is the Python 2 iterator protocol; use the builtin next().
    for i in range(3):
      pr.append(next(cur))
      print(pr[i])

  q = 'functional programming'
  if cmd == secmd.kSearch:
    e = se.searcher(dbname)
    res = e.getmatchrows(q)
    print(res[0])

  if cmd == secmd.kQuery:
    e = se.searcher(dbname)
    e.query(q)
