#coding=utf-8
import urllib
import urllib2
from bs4 import BeautifulSoup
import re
import codecs
import socket
import sys
reload(sys)
sys.setdefaultencoding('utf-8')

# Global socket timeout: any HTTP request that hangs fails after 10 s
# instead of blocking the crawl forever.
socket.setdefaulttimeout(10)
baseurl="http://poi86.com"
# Spoofed User-Agent imitating Baiduspider — presumably to avoid bot
# blocking by the target site (can't confirm the motivation from here).
user_agent = 'Mozilla/5.0 (compatible; Baiduspider/2.0; +http://www.baidu.com/search/spider.html)'
headers = { 'User-Agent' : user_agent }
# Module-level UTF-8 output handles shared by all crawl functions below:
#   a — result.txt:     one line per scraped POI (name + attribute values)
#   b — error.txt:      POI detail-page URLs whose fetch/parse failed
#   c — page_error.txt: district/listing-page URLs whose fetch/parse failed
a=codecs.open("result.txt", "w", "utf8")
b=codecs.open("error.txt", "w", "utf8")
c=codecs.open("page_error.txt", "w", "utf8")
def handler(signum, frame):
    """Signal handler that aborts the interrupted call by raising.

    Raising AssertionError unwinds out of whatever blocking operation the
    signal interrupted.  NOTE(review): never registered via signal.signal()
    anywhere in this file — appears to be leftover plumbing.
    """
    raise AssertionError()
def getOnePOI(url, retries=3):
    """Fetch one POI detail page and append its record to result.txt.

    Writes the page heading (".panel-heading h1") followed by the value
    part (text after the first ':') of every ".list-group-item" on the
    page, space-separated, as a single line in the global `a` file.

    On any error the URL is appended to error.txt and the fetch is
    retried up to `retries` more times.  The original code recursed
    unconditionally on failure, so a permanently dead URL recursed until
    the interpreter hit the recursion limit; `retries` bounds that while
    keeping the one-argument call signature backward-compatible.
    """
    try:
        request = urllib2.Request(url, headers=headers)
        response = urllib2.urlopen(request)
        content = response.read().decode("utf-8")
        soup = BeautifulSoup(content)
        items = soup.select(".list-group-item")
        name = soup.select(".panel-heading h1")[0].text
        a.write(name + u" ")
        for item in items:
            # Each item reads "label:value"; keep only the value.
            a.write(item.getText().split(u":")[1] + u" ")
        a.write(u"\n")
        print(url)
    except Exception as e:
        # Log first, then retry — the original retried *before* printing,
        # and used the deprecated Py2-only e.message attribute.
        b.write(url + u"\n")
        print(e)
        if retries > 0:
            getOnePOI(url, retries - 1)
        return
def getDistrictOne(url, retries=3):
    """Scrape every bus-stop POI from one district listing page.

    Scans each table row with at least 4 cells; when the 4th cell's text
    contains u"\u516c\u4ea4\u8f66\u7ad9" ("bus stop"), follows the anchor
    in the first cell via getOnePOI.

    On any error the URL is appended to page_error.txt and the page is
    retried up to `retries` more times (the original recursed without
    bound on a persistently failing page — a stack-overflow bug).
    """
    try:
        request = urllib2.Request(url, headers=headers)
        response = urllib2.urlopen(request)
        content = response.read().decode("utf-8")
        soup = BeautifulSoup(content)
        for tr in soup.select("tr"):
            tds = tr.select("td")
            if len(tds) >= 4 and tds[3].text.find(u"\u516c\u4ea4\u8f66\u7ad9") >= 0:
                getOnePOI(baseurl + tds[0].select("a")[0].get("href"))
    except Exception as e:
        c.write(url + u"\n")
        print(e)  # e.message is deprecated Py2-only
        if retries > 0:
            getDistrictOne(url, retries - 1)
        return
def getDistrict(url, retries=3):
    """Crawl all listing pages of one district.

    Reads the page count from the second ".disabled" pager element, whose
    text looks like "current/total", then rewrites the trailing
    "<digit>.html" of `url` for each index 0..num and hands every page to
    getDistrictOne.  Districts with no pager (num stays 0) are skipped.
    NOTE(review): the range starts at page 0, preserved from the original
    even though page 0 may not exist — confirm against the site.

    On any error the URL goes to page_error.txt and the whole district is
    retried up to `retries` more times (originally unbounded recursion).
    """
    try:
        request = urllib2.Request(url, headers=headers)
        response = urllib2.urlopen(request)
        content = response.read().decode("utf-8")
        soup = BeautifulSoup(content)
        num = 0
        pagearea = soup.select(".disabled")
        if len(pagearea) > 1:
            # Part after '/' in "current/total" is the total page count.
            num = int(pagearea[1].getText().split("/")[1])
        if num == 0:
            return
        # Raw string (the original "\d" relied on lax escape handling,
        # deprecated in modern Python); compiled once outside the loop.
        page_pattern = re.compile(r"\d\.html")
        for i in range(num + 1):
            urli = page_pattern.sub(str(i) + ".html", url)
            print(urli)
            getDistrictOne(urli)
    except Exception as e:
        c.write(url + u"\n")
        print(e)  # e.message is deprecated Py2-only
        if retries > 0:
            getDistrict(url, retries - 1)
        return
def getPOI(url):
    """Scrape a city index page and crawl each district linked from it.

    Collects every anchor whose href matches '/poi/amap/district/' and
    crawls each via getDistrict.  The first link is skipped — reason not
    evident from the markup; preserved from the original code.  Errors
    are printed and the city is abandoned (no retry at this level).
    """
    try:
        request = urllib2.Request(url, headers=headers)
        response = urllib2.urlopen(request)
        content = response.read().decode("utf-8")
        soup = BeautifulSoup(content)
        # 'links', not 'list' — the original shadowed the builtin.
        links = soup.find_all(href=re.compile('/poi/amap/district/'))
        hrefs = [link.get("href") for link in links]
        for href in hrefs[1:]:  # slice replaces the range/continue dance
            print(href)
            getDistrict(baseurl + href)
    except Exception as e:
        print(e)  # e.message is deprecated Py2-only
        return
if __name__=="__main__":
    #getDistrict("http://www.poi86.com/poi/amap/district/420111/1.html")
    try:
        # Entry point: crawl the Beijing (110100) city index.
        getPOI("http://www.poi86.com/poi/amap/city/110100.html")
    finally:
        # Close (and thereby flush) the output files even if the crawl
        # raises — the original leaked buffered output on any crash.
        a.close()
        b.close()
        c.close()

