# -*- coding: UTF-8 -*- 
from BeautifulSoup import BeautifulSoup
import urllib2
from deal_info_web import *

def deal_city(tuan, city):
    """Scrape the current nuomi.com deal for one city and write it to XML.

    tuan -- site identifier used in the output file name (e.g. 'nuomi').
    city -- city path component of the nuomi.com URL (e.g. 'beijing').

    Output goes to <home>/xml/<tuan>_<city>.xml; previously saved deals
    (returned by judge_begin as `orig`) are appended after the fresh one.
    """
    # judge_begin / judge / judge_end come from deal_info_web; presumably
    # they track already-seen deal ids so duplicates are skipped -- TODO confirm.
    (f_id, d_id, orig) = judge_begin(tuan, city)

    # Bug fix: the HTTP response was never closed; close it once parsed.
    f_url = urllib2.urlopen('http://www.nuomi.com/%s' % (city))
    try:
        soup = BeautifulSoup(f_url.read())
    finally:
        f_url.close()

    f_xml = open('%sxml/%s_%s.xml' % (get_home_dir(), tuan, city), 'w')

    f_xml.write('<deals>\n')

    # The renren share button embeds the deal URL (percent-encoded) in its
    # href; strip the fixed prefix and the '.html' suffix to get the bare id.
    deal_id = soup.find(attrs={'class':'renren'})['href'].replace('http://share.renren.com/share/buttonshare?link=http%3A%2F%2Fwww.nuomi.com%2F', '').replace('.html', '')
    # Bug fix: this body was tab-indented while the rest of the function
    # uses 4-space indents (a TabError under Python 3); normalized to spaces.
    if not judge(f_id, d_id, deal_id):
        f_xml.write('\t' + '<deal>\n')
        deal_info_source(f_xml, 'nuomi', '糯米网'.decode('utf-8'), 'http://www.nuomi.com/', 'http://www.nuomi.com/i/1FM1nZnU')

        f_xml.write('\t' * 2 + '<city>%s</city>\n' % city)

        deal_info_add(f_xml, 'id', deal_id, 2)
        deal_title = soup.find(attrs={'class':'deal-main'}).h1.contents[0]
        deal_info_add(f_xml, 'name', deal_title, 2)
        deal_info_cats(f_xml, deal_title)
        deal_vendor_name = ''
        deal_info_vendor(f_xml, deal_vendor_name)
        deal_info_time(f_xml)
        # Original price: strip the CNY sign and thousands separators.
        deal_value = soup.find(attrs={'class':'original'}).contents[0].replace('¥'.decode('utf-8'), '').replace(',', '')
        deal_info_add(f_xml, 'o_price', deal_value, 2)
        # Current price = original price minus the discount amount taken from
        # the discount table's second row, third cell.
        deal_price = str(float(deal_value) - float(soup.find(attrs={'class':'deal-discount'}).table('tr')[1]('td')[2].strong.contents[0].replace('¥'.decode('utf-8'), '').replace(',', '')))
        deal_info_add(f_xml, 'c_price', deal_price, 2)
        deal_info_discount_and_save_money(f_xml, deal_price, deal_value)
        deal_img = soup.find(attrs={'class':'product-pic'}).img['src']
        deal_info_img(f_xml, deal_img)
        deal_url = 'http://www.nuomi.com/%s.html' % (deal_id)
        deal_info_add(f_xml, 'url', deal_url, 2)

        f_xml.write('\t' + '</deal>\n')

    # Append the previously saved deals, skipping the first 8 characters of
    # `orig` (presumably its '<deals>\n' header -- TODO confirm in judge_begin).
    f_xml.write(orig[8 : ])

    f_xml.close()
    f_id.close()

    judge_end(tuan, city)



# Cities to scrape; extend this tuple to cover more nuomi.com city pages.
CITIES = ('beijing', 'shanghai', 'hangzhou')

# Guard the entry point so importing this module does not trigger scraping.
if __name__ == '__main__':
    for city in CITIES:
        deal_city('nuomi', city)
