#!/usr/bin/python
#coding:utf-8

import re
import urllib2
import json
from bs4 import BeautifulSoup
import MySQLdb

def OpenPage(url):
    """Fetch *url* over HTTP and return the raw response body as a byte string.

    Raises urllib2.URLError / HTTPError on network or HTTP failures
    (propagated to the caller).
    """
    # Placeholder header dict: add a User-Agent here if the target site
    # rejects the default urllib2 client string.
    headers = {}
    req = urllib2.Request(url, headers=headers)
    f = urllib2.urlopen(req)
    try:
        # Read the whole body before closing; the original leaked the
        # response socket by never calling close().
        return f.read()
    finally:
        f.close()

def ParseMainPage(page):
    """Parse the JSON listing page and return the list of detail-page URLs.

    *page* is a JSON document with a top-level "rows" array whose items
    each carry an 'Id' field.  Returns one URL string per row, formed by
    appending the id to the URL prefix.
    """
    data = json.loads(page)
    rows = data["rows"]
    # NOTE(review): "url&JobId=" looks like a placeholder for the real
    # detail-page URL prefix -- confirm against the target site.
    prefix = "url&JobId="
    # Original line was `return urllist[...]`: an undefined name and a
    # syntax error.  The intended result is a plain list comprehension.
    # Assumes item['Id'] is already a string -- TODO confirm.
    return [prefix + item['Id'] for item in rows]

def ParseDetailPage(page):
    """Parse a JSON detail page and return its 'Data' payload.

    Returns None (after printing an error marker) when the response's
    "Succeed" flag is false.
    """
    data = json.loads(page)
    # Truthiness test instead of `== False`; parenthesized print works
    # identically under Python 2 and 3 for a single argument.
    if not data["Succeed"]:
        print("error")
        return None
    return data['Data']


def WriteDataTofile(text):
    """Append one crawled record to the result file.

    Defined here because the main loop called this name but no definition
    existed anywhere in the file (a guaranteed NameError at runtime).
    NOTE(review): output filename is a guess -- confirm the intended target.
    """
    with open("crawler_result.txt", "a") as out:
        out.write(text + "\n")


if __name__ == "__main__":
    # NOTE(review): "web url" is a placeholder for the real listing URL.
    url = "web url"
    mainPage = OpenPage(url)
    urlList = ParseMainPage(mainPage)
    for item in urlList:
        print("Crawler url = " + item)
        detailPage = OpenPage(item)
        # Original had two typos here: ParseDatailPage / datailPage,
        # both NameErrors on first iteration.
        data = ParseDetailPage(detailPage)
        # ParseDetailPage returns None on a failed response; skip those
        # instead of crashing inside "\n".join.
        if data:
            WriteDataTofile("\n".join(data))

    print("Crawler done")














































































