{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [],
   "source": [
    "from PIL import Image,ImageDraw\n",
    "import PIL.Image as IMG\n",
    "\n",
    "import urllib as urllib\n",
    "import re\n",
    "\n",
    "import sqlite3 as sqlite\n",
    "\n",
    "from bs4 import BeautifulSoup as BeautifulSoup\n",
    "\n",
    "#from urlparse import urljoin\n",
    "from urllib.parse import urljoin  # as urljoin\n",
    "#urllib.parse  \n",
    "\n",
    "##from pysqlite2 import dbapi2 as sqlite\n",
    "\n",
    "import nn\n",
    "\n",
    "class crawler:\n",
    "\t# Initialize the crawler with the name of database\n",
    "\tdef __init__(self,dbname):\n",
    "\t\tself.con=sqlite.connect(dbname)\n",
    "        #self.createindextables(self)\n",
    "\t\n",
    "\tdef __del__(self):\n",
    "\t\tself.con.close()\n",
    "\n",
    "\tdef dbcommit(self):\n",
    "\t\tself.con.commit()\n",
    "\n",
    "\t# Auxilliary function for getting an entry id and adding \n",
    "\t# it if it's not present\n",
    "\tdef getentryid(self,table,field,value,createnew=True):\n",
    "\t\tcur=self.con.execute(\n",
    "\t\t\"select rowid from %s where %s='%s'\" % (table,field,value))\n",
    "\t\tres=cur.fetchone()\n",
    "\t\tif res==None:\n",
    "\t\t\tcur=self.con.execute(\n",
    "\t\t\t\"insert into %s (%s) values ('%s')\" % (table,field,value))\n",
    "\t\t\treturn cur.lastrowid\n",
    "\t\telse:\n",
    "\t\t\treturn res[0] \n",
    "\n",
    "\n",
    "\t# Index an individual page\n",
    "\tdef addtoindex(self,url,soup):\n",
    "\t\tif self.isindexed(url): return\n",
    "\t\tprint ('Indexing '+url)\n",
    "\t\n",
    "\t\t# Get the individual words\n",
    "\t\ttext=self.gettextonly(soup)\n",
    "\t\twords=self.separatewords(text)\n",
    "\t\t\n",
    "\t\t# Get the URL id\n",
    "\t\turlid=self.getentryid('urllist','url',url)\n",
    "\t\t\n",
    "\t\t# Link each word to this url\n",
    "\t\tfor i in range(len(words)):\n",
    "\t\t\tword=words[i]\n",
    "\t\t\tif word in ignorewords: continue\n",
    "\t\t\twordid=self.getentryid('wordlist','word',word)\n",
    "\t\t\tself.con.execute(\"insert into wordlocation(urlid,wordid,location) values (%d,%d,%d)\" % (urlid,wordid,i))\n",
    "\t\n",
    "\n",
    "\t\n",
    "\t# Extract the text from an HTML page (no tags)\n",
    "\tdef gettextonly(self,soup):\n",
    "\t\tv=soup.string\n",
    "\t\tif v==None:   ##original Null\n",
    "\t\t\tc=soup.contents\n",
    "\t\t\tresulttext=''\n",
    "\t\t\tfor t in c:\n",
    "\t\t\t\tsubtext=self.gettextonly(t)\n",
    "\t\t\t\tresulttext+=subtext+'\\n'\n",
    "\t\t\treturn resulttext\n",
    "\t\telse:\n",
    "\t\t\treturn v.strip()\n",
    "\n",
    "\t# Seperate the words by any non-whitespace character\n",
    "\tdef separatewords(self,text):\n",
    "\t\tsplitter=re.compile('\\\\W*')\n",
    "\t\treturn [s.lower() for s in splitter.split(text) if s!='']\n",
    "\n",
    "\t\t\n",
    "\t# Return true if this url is already indexed\n",
    "\tdef isindexed(self,url):\n",
    "\t\treturn False\n",
    "\t\n",
    "\t# Add a link between two pages\n",
    "\tdef addlinkref(self,urlFrom,urlTo,linkText):\n",
    "\t\twords=self.separatewords(linkText)\n",
    "\t\tfromid=self.getentryid('urllist','url',urlFrom)\n",
    "\t\ttoid=self.getentryid('urllist','url',urlTo)\n",
    "\t\tif fromid==toid: return\n",
    "\t\tcur=self.con.execute(\"insert into link(fromid,toid) values (%d,%d)\" % (fromid,toid))\n",
    "\t\tlinkid=cur.lastrowid\n",
    "\t\tfor word in words:\n",
    "\t\t\tif word in ignorewords: continue\n",
    "\t\t\twordid=self.getentryid('wordlist','word',word)\n",
    "\t\t\tself.con.execute(\"insert into linkwords(linkid,wordid) values (%d,%d)\" % (linkid,wordid))\n",
    "\n",
    "\t# Starting with a list of pages, do a breadth\n",
    "\t# first search to the given depth, indexing pages\n",
    "\t# as we go\n",
    "\tdef crawl(self,pages,depth=2):\n",
    "\t\tfor i in range(depth):\n",
    "\t\t\tprint('depth %d begins' % i)\n",
    "\t\t\tnewpages={}\n",
    "\t\t\tfor page in pages:\n",
    "\t\t\t\ttry:\n",
    "\t\t\t\t\tc=urllib.request.urlopen(page) # .read()\n",
    "\t\t\t\t\tprint(c)\n",
    "\t\t\t\texcept:\n",
    "\t\t\t\t\tprint (\"Could not open %s\" % page)\n",
    "\t\t\t\t\tcontinue\n",
    "\t\t\t\ttry:\n",
    "\t\t\t\t\tsoup=BeautifulSoup(c.read())\n",
    "\t\t\t\t\tself.addtoindex(page,soup)\n",
    "\t\n",
    "\t\t\t\t\tlinks=soup('a')\n",
    "\t\t\t\t\tfor link in links:\n",
    "\t\t\t\t\t\tif ('href' in dict(link.attrs)):\n",
    "\t\t\t\t\t\t\turl=urljoin(page,link['href'])\n",
    "\t\t\t\t\t\t\tif url.find(\"'\")!=-1: continue\n",
    "\t\t\t\t\t\t\turl=url.split('#')[0]  # remove location portion\n",
    "\t\t\t\t\t\t\tif url[0:4]=='http' and not self.isindexed(url):\n",
    "\t\t\t\t\t\t\t\tnewpages[url]=1\n",
    "\t\t\t\t\t\t\tlinkText=self.gettextonly(link)\n",
    "\t\t\t\t\t\t\tself.addlinkref(page,url,linkText)\n",
    "\t\n",
    "\t\t\t\t\tself.dbcommit()\n",
    "\t\t\t\texcept:\n",
    "\t\t\t\t\tprint (\"Could not parse page %s\" % page)\n",
    "\n",
    "\t\t\tpages=newpages\n",
    "\n",
    "\t\n",
    "\t# Create the database tables\n",
    "\tdef createindextables(self): \n",
    "\t\tself.con.execute('create table urllist(url)')\n",
    "\t\tself.con.execute('create table wordlist(word)')\n",
    "\t\tself.con.execute('create table wordlocation(urlid,wordid,location)')\n",
    "\t\tself.con.execute('create table link(fromid integer,toid integer)')\n",
    "\t\tself.con.execute('create table linkwords(wordid,linkid)')\n",
    "\t\tself.con.execute('create index wordidx on wordlist(word)')\n",
    "\t\tself.con.execute('create index urlidx on urllist(url)')\n",
    "\t\tself.con.execute('create index wordurlidx on wordlocation(wordid)')\n",
    "\t\tself.con.execute('create index urltoidx on link(toid)')\n",
    "\t\tself.con.execute('create index urlfromidx on link(fromid)')\n",
    "\t\tself.dbcommit()\n",
    "\n",
    "\tdef calculatepagerank(self,iterations=20):\n",
    "\t\t# clear out the current page rank tables\n",
    "\t\tself.con.execute('drop table if exists pagerank')\n",
    "\t\tself.con.execute('create table pagerank(urlid primary key,score)')\n",
    "\t\t\n",
    "\t\t# initialize every url with a page rank of 1\n",
    "\t\tfor (urlid,) in self.con.execute('select rowid from urllist'):\n",
    "\t\t\tself.con.execute('insert into pagerank(urlid,score) values (%d,1.0)' % urlid)\n",
    "\t\tself.dbcommit()\n",
    "\t\t\n",
    "\t\tfor i in range(iterations):\n",
    "\t\t\tprint (\"Iteration %d\" % (i))\n",
    "\t\t\tfor (urlid,) in self.con.execute('select rowid from urllist'):\n",
    "\t\t\t\tpr=0.15\n",
    "\t\t\t\t\n",
    "\t\t\t\t# Loop through all the pages that link to this one\n",
    "\t\t\t\tfor (linker,) in self.con.execute(\n",
    "\t\t\t\t'select distinct fromid from link where toid=%d' % urlid):\n",
    "\t\t\t\t\t# Get the page rank of the linker\n",
    "\t\t\t\t\tlinkingpr=self.con.execute(\n",
    "\t\t\t\t\t'select score from pagerank where urlid=%d' % linker).fetchone()[0]\n",
    "\n",
    "\t\t\t\t\t# Get the total number of links from the linker\n",
    "\t\t\t\t\tlinkingcount=self.con.execute(\n",
    "\t\t\t\t\t'select count(*) from link where fromid=%d' % linker).fetchone()[0]\n",
    "\t\t\t\t\tpr+=0.85*(linkingpr/linkingcount)\n",
    "\t\t\t\tself.con.execute(\n",
    "\t\t\t\t'update pagerank set score=%f where urlid=%d' % (pr,urlid))\n",
    "\t\t\tself.dbcommit()\n",
    "\n",
    "class searcher:\n",
    "\tdef __init__(self,dbname):\n",
    "\t\tself.con=sqlite.connect(dbname)\n",
    "\n",
    "\tdef __del__(self):\n",
    "\t\tself.con.close()\n",
    "\n",
    "\tdef getmatchrows(self,q):\n",
    "\t\t# Strings to build the query\n",
    "\t\tfieldlist='w0.urlid'\n",
    "\t\ttablelist=''  \n",
    "\t\tclauselist=''\n",
    "\t\twordids=[]\n",
    "\n",
    "\t\t# Split the words by spaces\n",
    "\t\twords=q.split(' ')  \n",
    "\t\ttablenumber=0\n",
    "\n",
    "\t\tfor word in words:\n",
    "\t\t\t# Get the word ID\n",
    "\t\t\twordrow=self.con.execute(\n",
    "\t\t\t\"select rowid from wordlist where word='%s'\" % word).fetchone()\n",
    "\t\t\tif wordrow!=None:\n",
    "\t\t\t\twordid=wordrow[0]\n",
    "\t\t\t\twordids.append(wordid)\n",
    "\t\t\t\tif tablenumber>0:\n",
    "\t\t\t\t\ttablelist+=','\n",
    "\t\t\t\t\tclauselist+=' and '\n",
    "\t\t\t\t\tclauselist+='w%d.urlid=w%d.urlid and ' % (tablenumber-1,tablenumber)\n",
    "\t\t\t\tfieldlist+=',w%d.location' % tablenumber\n",
    "\t\t\t\ttablelist+='wordlocation w%d' % tablenumber      \n",
    "\t\t\t\tclauselist+='w%d.wordid=%d' % (tablenumber,wordid)\n",
    "\t\t\t\ttablenumber+=1\n",
    "\n",
    "        # Create the query from the separate parts\n",
    "\t\ttry:\n",
    "\t\t\tfullquery='select %s from %s where %s' % (fieldlist,tablelist,clauselist)\n",
    "\t\t\tprint ('fullquery:',fullquery)\n",
    "\n",
    "\t\t\tcur=self.con.execute(fullquery)\n",
    "\t\t\trows=[row for row in cur]\n",
    "\n",
    "\t\t\treturn rows,wordids\n",
    "\t\texcept:\n",
    "\t\t\treturn [],wordids\n",
    "\n",
    "\tdef getscoredlist(self,rows,wordids):\n",
    "\t\ttotalscores=dict([(row[0],0) for row in rows])\n",
    "\n",
    "\t\t# This is where we'll put our scoring functions\n",
    "\t\tweights=[(1.0,self.locationscore(rows)), \n",
    "\t\t\t\t\t\t (1.0,self.frequencyscore(rows)),\n",
    "\t\t\t\t\t\t (1.0,self.pagerankscore(rows)),\n",
    "\t\t\t\t\t\t (1.0,self.linktextscore(rows,wordids)),\n",
    "\t\t\t\t\t\t (5.0,self.nnscore(rows,wordids))]\n",
    "\t\tfor (weight,scores) in weights:\n",
    "\t\t\tfor url in totalscores:\n",
    "\t\t\t\ttotalscores[url]+=weight*scores[url]\n",
    "\n",
    "\t\treturn totalscores\n",
    "\n",
    "\tdef geturlname(self,id):\n",
    "\t\treturn self.con.execute(\n",
    "\t\t\"select url from urllist where rowid=%d\" % id).fetchone()[0]\n",
    "\n",
    "\tdef query(self,q):\n",
    "\t\trows,wordids=self.getmatchrows(q)\n",
    "\t\tscores=self.getscoredlist(rows,wordids)\n",
    "\t\trankedscores=[(score,url) for (url,score) in scores.items()]\n",
    "\t\trankedscores.sort()\n",
    "\t\trankedscores.reverse()\n",
    "\t\tfor (score,urlid) in rankedscores[0:10]:\n",
    "\t\t\tprint( '%f\\t%s' % (score,self.geturlname(urlid)))\n",
    "\t\treturn wordids,[r[1] for r in rankedscores[0:10]]\n",
    "\n",
    "\tdef normalizescores(self,scores,smallIsBetter=0):\n",
    "\t\tvsmall=0.00001 # Avoid division by zero errors\n",
    "\t\tif smallIsBetter:\n",
    "\t\t\tminscore=min(scores.values())\n",
    "\t\t\treturn dict([(u,float(minscore)/max(vsmall,l)) for (u,l) in scores.items()])\n",
    "\t\telse:\n",
    "\t\t\tmaxscore=max(scores.values())\n",
    "\t\t\tif maxscore==0: maxscore=vsmall\n",
    "\t\t\treturn dict([(u,float(c)/maxscore) for (u,c) in scores.items()])\n",
    "\n",
    "\tdef frequencyscore(self,rows):\n",
    "\t\tcounts=dict([(row[0],0) for row in rows])\n",
    "\t\tfor row in rows: counts[row[0]]+=1\n",
    "\t\treturn self.normalizescores(counts)\n",
    "\n",
    "\tdef locationscore(self,rows):\n",
    "\t\tlocations=dict([(row[0],1000000) for row in rows])\n",
    "\t\tfor row in rows:\n",
    "\t\t\tloc=sum(row[1:])\n",
    "\t\t\tif loc<locations[row[0]]: locations[row[0]]=loc\n",
    "\t\t\n",
    "\t\treturn self.normalizescores(locations,smallIsBetter=1)\n",
    "\n",
    "\tdef distancescore(self,rows):\n",
    "\t\t# If there's only one word, everyone wins!\n",
    "\t\tif len(rows[0])<=2: return dict([(row[0],1.0) for row in rows])\n",
    "\n",
    "\t\t# Initialize the dictionary with large values\n",
    "\t\tmindistance=dict([(row[0],1000000) for row in rows])\n",
    "\n",
    "\t\tfor row in rows:\n",
    "\t\t\tdist=sum([abs(row[i]-row[i-1]) for i in range(2,len(row))])\n",
    "\t\t\tif dist<mindistance[row[0]]: mindistance[row[0]]=dist\n",
    "\t\treturn self.normalizescores(mindistance,smallIsBetter=1)\n",
    "\n",
    "\tdef inboundlinkscore(self,rows):\n",
    "\t\tuniqueurls=dict([(row[0],1) for row in rows])\n",
    "\t\tinboundcount=dict([(u,self.con.execute('select count(*) from link where toid=%d' % u).fetchone()[0]) for u in uniqueurls])   \n",
    "\t\treturn self.normalizescores(inboundcount)\n",
    "\n",
    "\tdef linktextscore(self,rows,wordids):\n",
    "\t\tlinkscores=dict([(row[0],0) for row in rows])\n",
    "\t\tfor wordid in wordids:\n",
    "\t\t\tcur=self.con.execute('select link.fromid,link.toid from linkwords,link where wordid=%d and linkwords.linkid=link.rowid' % wordid)\n",
    "\t\t\tfor (fromid,toid) in cur:\n",
    "\t\t\t\tif toid in linkscores:\n",
    "\t\t\t\t\tpr=self.con.execute('select score from pagerank where urlid=%d' % fromid).fetchone()[0]\n",
    "\t\t\t\t\tlinkscores[toid]+=pr\n",
    "\t\tmaxscore=max(linkscores.values())\n",
    "\t\tnormalizedscores=dict([(u,float(l)/maxscore) for (u,l) in linkscores.items()])\n",
    "\t\treturn normalizedscores\n",
    "\n",
    "\tdef pagerankscore(self,rows):\n",
    "\t\tpageranks=dict([(row[0],self.con.execute('select score from pagerank where urlid=%d' % row[0]).fetchone()[0]) for row in rows])\n",
    "\t\tmaxrank=max(pageranks.values())\n",
    "\t\tnormalizedscores=dict([(u,float(l)/maxrank) for (u,l) in pageranks.items()])\n",
    "\t\treturn normalizedscores\n",
    "\n",
    "\tdef nnscore(self,rows,wordids):\n",
    "\t\t# Get unique URL IDs as an ordered list\n",
    "\t\turlids=[urlid for urlid in dict([(row[0],1) for row in rows])]\n",
    "\t\tnnres=mynet.getresult(wordids,urlids)\n",
    "\t\tscores=dict([(urlids[i],nnres[i]) for i in range(len(urlids))])\n",
    "\t\treturn self.normalizescores(scores)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "fullquery: select w0.urlid from  where \n",
      "fullquery: select w0.urlid from  where \n"
     ]
    },
    {
     "ename": "ValueError",
     "evalue": "min() arg is an empty sequence",
     "output_type": "error",
     "traceback": [
      "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
      "\u001b[1;31mValueError\u001b[0m                                Traceback (most recent call last)",
      "\u001b[1;32m<ipython-input-4-d921e6548378>\u001b[0m in \u001b[0;36m<module>\u001b[1;34m\u001b[0m\n\u001b[0;32m      5\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m      6\u001b[0m \u001b[0me\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mgetmatchrows\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mq\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m----> 7\u001b[1;33m \u001b[0me\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mquery\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mq\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m",
      "\u001b[1;32m<ipython-input-2-8f21cf039867>\u001b[0m in \u001b[0;36mquery\u001b[1;34m(self, q)\u001b[0m\n\u001b[0;32m    247\u001b[0m         \u001b[1;32mdef\u001b[0m \u001b[0mquery\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mself\u001b[0m\u001b[1;33m,\u001b[0m\u001b[0mq\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m    248\u001b[0m                 \u001b[0mrows\u001b[0m\u001b[1;33m,\u001b[0m\u001b[0mwordids\u001b[0m\u001b[1;33m=\u001b[0m\u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mgetmatchrows\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mq\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 249\u001b[1;33m                 \u001b[0mscores\u001b[0m\u001b[1;33m=\u001b[0m\u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mgetscoredlist\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mrows\u001b[0m\u001b[1;33m,\u001b[0m\u001b[0mwordids\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m    250\u001b[0m                 \u001b[0mrankedscores\u001b[0m\u001b[1;33m=\u001b[0m\u001b[1;33m[\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mscore\u001b[0m\u001b[1;33m,\u001b[0m\u001b[0murl\u001b[0m\u001b[1;33m)\u001b[0m \u001b[1;32mfor\u001b[0m \u001b[1;33m(\u001b[0m\u001b[0murl\u001b[0m\u001b[1;33m,\u001b[0m\u001b[0mscore\u001b[0m\u001b[1;33m)\u001b[0m \u001b[1;32min\u001b[0m \u001b[0mscores\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mitems\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m    251\u001b[0m                 \u001b[0mrankedscores\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0msort\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
      "\u001b[1;32m<ipython-input-2-8f21cf039867>\u001b[0m in \u001b[0;36mgetscoredlist\u001b[1;34m(self, rows, wordids)\u001b[0m\n\u001b[0;32m    230\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m    231\u001b[0m                 \u001b[1;31m# This is where we'll put our scoring functions\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 232\u001b[1;33m \t\tweights=[(1.0,self.locationscore(rows)), \n\u001b[0m\u001b[0;32m    233\u001b[0m                                                  \u001b[1;33m(\u001b[0m\u001b[1;36m1.0\u001b[0m\u001b[1;33m,\u001b[0m\u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mfrequencyscore\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mrows\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m,\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m    234\u001b[0m                                                  \u001b[1;33m(\u001b[0m\u001b[1;36m1.0\u001b[0m\u001b[1;33m,\u001b[0m\u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mpagerankscore\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mrows\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m,\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
      "\u001b[1;32m<ipython-input-2-8f21cf039867>\u001b[0m in \u001b[0;36mlocationscore\u001b[1;34m(self, rows)\u001b[0m\n\u001b[0;32m    276\u001b[0m                         \u001b[1;32mif\u001b[0m \u001b[0mloc\u001b[0m\u001b[1;33m<\u001b[0m\u001b[0mlocations\u001b[0m\u001b[1;33m[\u001b[0m\u001b[0mrow\u001b[0m\u001b[1;33m[\u001b[0m\u001b[1;36m0\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m:\u001b[0m \u001b[0mlocations\u001b[0m\u001b[1;33m[\u001b[0m\u001b[0mrow\u001b[0m\u001b[1;33m[\u001b[0m\u001b[1;36m0\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m=\u001b[0m\u001b[0mloc\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m    277\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 278\u001b[1;33m                 \u001b[1;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mnormalizescores\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mlocations\u001b[0m\u001b[1;33m,\u001b[0m\u001b[0msmallIsBetter\u001b[0m\u001b[1;33m=\u001b[0m\u001b[1;36m1\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m    279\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m    280\u001b[0m         \u001b[1;32mdef\u001b[0m \u001b[0mdistancescore\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mself\u001b[0m\u001b[1;33m,\u001b[0m\u001b[0mrows\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
      "\u001b[1;32m<ipython-input-2-8f21cf039867>\u001b[0m in \u001b[0;36mnormalizescores\u001b[1;34m(self, scores, smallIsBetter)\u001b[0m\n\u001b[0;32m    258\u001b[0m                 \u001b[0mvsmall\u001b[0m\u001b[1;33m=\u001b[0m\u001b[1;36m0.00001\u001b[0m \u001b[1;31m# Avoid division by zero errors\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m    259\u001b[0m                 \u001b[1;32mif\u001b[0m \u001b[0msmallIsBetter\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 260\u001b[1;33m                         \u001b[0mminscore\u001b[0m\u001b[1;33m=\u001b[0m\u001b[0mmin\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mscores\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mvalues\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m    261\u001b[0m                         \u001b[1;32mreturn\u001b[0m \u001b[0mdict\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m[\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mu\u001b[0m\u001b[1;33m,\u001b[0m\u001b[0mfloat\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mminscore\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m/\u001b[0m\u001b[0mmax\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mvsmall\u001b[0m\u001b[1;33m,\u001b[0m\u001b[0ml\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m)\u001b[0m \u001b[1;32mfor\u001b[0m \u001b[1;33m(\u001b[0m\u001b[0mu\u001b[0m\u001b[1;33m,\u001b[0m\u001b[0ml\u001b[0m\u001b[1;33m)\u001b[0m \u001b[1;32min\u001b[0m \u001b[0mscores\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mitems\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m    262\u001b[0m                 \u001b[1;32melse\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
      "\u001b[1;31mValueError\u001b[0m: min() arg is an empty sequence"
     ]
    }
   ],
   "source": [
    "# Open the search index built by the crawler and run a sample query.\n",
    "e=searcher('searchindex.db')\n",
    "\n",
    "# Query mixing an English term and a Chinese term (space-separated)\n",
    "q='python 模板'\n",
    "#q.split(' ')\n",
    "\n",
    "# getmatchrows returns (rows, wordids); query prints the top-10 ranked URLs\n",
    "e.getmatchrows(q)\n",
    "e.query(q)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.4"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
