# coding=UTF-8
'''
Created on 23 Apr 2012

@author: R
'''

import re
from base import processpage, search_term
from tools.BeautifulSoup import BeautifulSoup


def resolve_page(url, proxy):
    """Fetch one land-listing detail page and flatten its fields into a
    comma-separated string.

    url   -- absolute URL of the detail page
    proxy -- proxy spec, passed straight through to processpage()

    Returns "px,py,field1,...,field29," -- note the trailing comma, which
    matches the CSV row format assembled by run().
    """
    content = processpage(url, proxy)

    # Map coordinates embedded in the raw page source (script/query params).
    px = search_term(re.search("pointx.(.*?)&", content))
    py = search_term(re.search("pointy.(.*?)\"", content))

    # Collapse every <td> cell into one whitespace-free string so a single
    # regex per label can pick out each "label：<tag>value<" fragment.
    soup = BeautifulSoup(content)
    sw = "".join(str(soup.findAll("td")).split())

    # Field labels, in output-column order.  Each value sits after the
    # label's closing '>' and runs up to the next '<'.
    labels = (
        "地区", "所在地", "总面积", "建设用地面积", "规划建筑面积",
        "代征面积", "容积率", "绿化率", "商业比例", "建筑密度",
        "限制高度", "出让形式", "出让年限", "位置", "四至", "规划用途",
        "交易状况", "竞得方", "起始日期", "截止日期", "成交日期",
        "交易地点", "起始价", "成交价", "楼面地价", "溢价率",
        "咨询电话", "保证金", "最小加价幅度",
    )
    fields = [px, py]
    for label in labels:
        # search_term() handles both a Match object and None (label absent).
        fields.append(search_term(re.search(label + "：[^>]+?>(.*?)<", sw)))

    # Appending "," keeps the original trailing comma on the joined result.
    return ",".join(fields) + ","


def run():
    """Read listing lines from ..\\lists.txt, resolve each URL and append
    one CSV row per listing to ..\\land.csv.

    Input lines look like "title,maybe,more,title,http://...,...": the
    title itself may contain commas, so every field up to the first one
    starting with 'http' is re-joined (commas dropped) into the title.
    """
    infile = "..\\lists.txt"
    outfile = "..\\land.csv"
    proxy = "0"
    # 'with' guarantees both handles are closed (and output flushed) even
    # if resolve_page raises -- the originals were never closed at all.
    with open(infile, "r") as fh_getl, open(outfile, "a") as fh_putd:
        for lines in fh_getl:
            line = lines.split(',')
            # Re-assemble a comma-containing title: consume fields until
            # one looks like the URL.
            ltitle = line[0]
            i = 1
            while not line[i].startswith('http'):
                ltitle += line[i]
                i += 1
            lurl = line[i]
            fh_putd.write(ltitle + ',' + lurl + ',' + resolve_page(lurl, proxy) + '\n')
            print("done:" + lurl)

# Script entry point: scrape only when executed directly, not on import.
if __name__ == '__main__':
    run()
    