__author__ = 'Vivienfanghua'
# -*- coding:utf-8 -*-
import urllib
import urllib2
from bs4 import BeautifulSoup
import re
import codecs
import socket

# Abort any socket operation (page download) that stalls for more than 10s.
socket.setdefaulttimeout(10)
# Site root; relative links scraped from pages are joined onto this.
baseurl="http://poi86.com"
# Present ourselves as Baiduspider so the site serves crawler pages.
user_agent = 'Mozilla/5.0 (compatible; Baiduspider/2.0; +http://www.baidu.com/search/spider.html)'
headers = { 'User-Agent' : user_agent }
# a: scraped POI records, one line per POI (UTF-8).
a=codecs.open("D://output.txt", "w", "utf8")
# b: URLs of POI detail pages that failed to download/parse.
b=codecs.open("D://error_one.txt", "w", "utf8")
# c: URLs of listing/pagination pages that failed.
c=codecs.open("D://error_page.txt", "w", "utf8")
def handler(signum, frame):
    """Abort the current operation by raising AssertionError.

    Has the (signum, frame) signature of a signal handler.
    NOTE(review): defined but never registered anywhere in this file.
    """
    raise AssertionError()
def getOnePOI(url):
    try:
       request = urllib2.Request(url,headers = headers)
       response = urllib2.urlopen(request)
       content=response.read().decode("utf-8")
       soup=BeautifulSoup(content)
       items=soup.select(".list-group-item")
       for i in items:
           a.write(i.getText().split(u":")[1]+u" ")
       a.write(u"\n")
       print url
    except Exception,e:
        b.write(url+u"\n")
        getOnePOI(url)
        print e.message
        return
def getDistrictOne(url):
    try:
       request = urllib2.Request(url,headers = headers)
       response = urllib2.urlopen(request)
       content=response.read().decode("utf-8")
       soup=BeautifulSoup(content)
       trs=soup.select("tr")
       urls=[i.select("a")[0].get("href") for i in trs if len(i.select("a"))>0]
       for i in urls:
           getOnePOI(baseurl+i)
    except Exception,e:
        c.write(url+u"\n")
        getDistrictOne(url)
        print e.message
        return
def getDistrict(url):
   try:
       request = urllib2.Request(url,headers = headers)
       response = urllib2.urlopen(request)
       content=response.read().decode("utf-8")
       soup=BeautifulSoup(content)
       num=0
       pagearea=soup.select(".disabled")
       if len(pagearea)>1:
           numtext=pagearea[1].getText()
           num=int(numtext.split("/")[1])
       if num==0:
            return
       for i in range(1,num+1):
           urli=re.sub(re.compile("\d\.html"),str(i)+".html",url)
           print urli
           getDistrictOne(urli)
   except Exception,e:
        c.write(url+u"\n")
        getDistrict(url)
        print e.message
        return
def getPOI(url):
    try:
        request = urllib2.Request(url,headers = headers)
        response = urllib2.urlopen(request)
        content=response.read().decode("utf-8")
        soup=BeautifulSoup(content)
        list=soup.find_all(href=re.compile('/poi/amap/district/'))
        urls=[i.get("href") for i in list]
        for i in urls:
            print i
            getDistrict(baseurl+i)
    except Exception,e:
        print e.message
        return
if __name__=="__main__":
    getDistrict("http://www.poi86.com/poi/amap/district/420111/1.html")
    a.close()
    b.close()
    c.close()
    # getPOI("http://poi86.com/poi/amap/city/330100.html")
