from bs4 import BeautifulSoup
import os
import urllib.request
import urllib.parse

# Site root; prepended to the relative hrefs scraped from the listing page.
base_url = "http://www.damai.cn/"

def get_lowest_price(url):
	"""Fetch a ticket detail page and return its lowest available price.

	The page carries one ``dd`` element with id ``price`` whose anchor
	tags represent price tiers; tiers styled with the ``grey`` class are
	sold out. The first non-grey anchor's ``price`` attribute is the
	lowest purchasable price.

	Args:
		url: absolute URL of a damai.cn ticket detail page.

	Returns:
		The price as a string, or None when every tier is sold out
		(or the first available anchor carries no ``price`` attribute).

	Raises:
		ValueError: if the page does not contain exactly one dd#price.
	"""
	# Context manager guarantees the HTTP response is closed.
	with urllib.request.urlopen(url) as res:
		html = res.read().decode()

	# Explicit parser: avoids bs4's "no parser specified" warning and
	# keeps behavior identical across machines with different parsers.
	soup = BeautifulSoup(html, 'html.parser')

	dds = soup.select('dd[id="price"]')
	if len(dds) != 1:
		# assert is stripped under `python -O`; raise a real error instead.
		raise ValueError('expected exactly one dd#price, got %d' % len(dds))

	for item in dds[0].find_all('a'):
		# .get() tolerates anchors with no class attribute, which would
		# previously raise KeyError via item.attrs['class'].
		classes = item.get('class') or []
		if 'grey' not in classes:
			return item.get('price')

	return None

def more_page(param_dict):
	"""POST an AJAX paging request to damai.cn and print the JSON response.

	Args:
		param_dict: optional dict of overrides for the default query
			parameters (type/cityID/categoryID/pageIndex/isText/order).
			Non-dict values are ignored for backward compatibility with
			existing callers that pass a placeholder string.

	Returns:
		The decoded response body (also printed, preserving the
		original side effect).
	"""
	url = "http://www.damai.cn/ajax.aspx"

	# Request headers mimicking the browser session the cookie came from.
	# NOTE: 'Accept-Encoding: gzip,...' is deliberately NOT sent — the
	# body is read with a plain .decode() below, which cannot handle a
	# compressed response.
	headers = {
		'Accept': 'application/json, text/javascript, */*',
		'Accept-Charset': 'GBK,utf-8;q=0.7,*;q=0.3',
		'Accept-Language': 'en-US,en;q=0.8,da;q=0.6',
		'Connection': 'keep-alive',
		'Content-Type': 'application/x-www-form-urlencoded',
		'Cookie': 'popup_404=1; page_tran_time=640.6291; popup_409=1; DaMaiTicketHistory=ProList=38599%40%e2%80%9c%e6%9c%89%e4%b8%80%e7%a7%8d%e7%b2%be%e7%a5%9e%e5%8f%ab%e2%80%9d%e8%90%a7%e6%95%ac%e8%85%be2012%e4%b8%96%e7%95%8c%e5%b7%a1%e5%9b%9e%e6%bc%94%e5%94%b1%e4%bc%9a%c2%b7%e4%b8%8a%e6%b5%b7Encore%e5%9c%ba%40sh%7c40408%40%e5%b4%94%e5%81%a5%e2%80%9c%e8%93%9d%e8%89%b2%e9%aa%a8%e5%a4%b4%e2%80%9d2012%e5%b7%a1%e5%9b%9e%e6%bc%94%e5%94%b1%e4%bc%9a%e4%b8%8a%e6%b5%b7%e7%ab%99%40sh%7c42405%40%e3%80%8c%e7%be%8e%e6%b1%81%e6%ba%90%e3%80%8dfeel+free+feel+music%e9%99%88%e5%a5%95%e8%bf%852012%e4%b8%8a%e6%b5%b7%e6%bc%94%e5%94%b1%e4%bc%9a%40sh; jiathis_rdc=%7B%22http%3A//www.damai.cn/ticket_42405.html%22%3A1537233503%2C%22http%3A//www.damai.cn/ticket_40408.html%22%3A118%7C1354452728662%2C%22http%3A//www.damai.cn/ticket_38599.html%22%3A%22464%7C1354454342093%22%7D; cpSTAT_ok_pages=12; cpSTAT_ok_times=1',
		'Host': 'www.damai.cn',
		'Origin': 'http://www.damai.cn',
		'Referer': 'http://www.damai.cn/sh/Perform-1/',
		'User-Agent': 'Mozilla/5.0 (X11; Linux i686) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.95 Safari/537.11',
		'X-Requested-With': 'XMLHttpRequest',
	}

	params = {'type': 4, 'cityID': 872, 'categoryID': 2,
	          'pageIndex': 2, 'isText': 0, 'order': 0}
	if isinstance(param_dict, dict):
		params.update(param_dict)

	# POST data must be bytes for urllib.request; the deprecated
	# FancyURLopener (whose nonexistent .close() would have raised
	# AttributeError) is replaced with the modern Request API.
	postdata = urllib.parse.urlencode(params).encode('ascii')
	req = urllib.request.Request(url, data=postdata, headers=headers)

	with urllib.request.urlopen(req) as response:
		content = response.read().decode()

	print(content)
	return content


def parse_html(html):
	"""Parse the concert listing page into a list of event dicts.

	Each dict has keys: 'url', 'title', 'date', 'place' and
	'lowest_avaiable_price' (price fetched per event via
	get_lowest_price, which makes one extra HTTP request per item).

	Args:
		html: decoded HTML of a damai.cn listing page containing a
			single ul#performList element.

	Returns:
		List of dicts, one per <li> event entry.

	Raises:
		ValueError: if the page does not contain exactly one
			ul#performList element.
	"""
	# The original body started with a leftover debug stub
	# (`more_page('s'); return`) that made everything below
	# unreachable; the stub has been removed so parsing runs again.
	soup = BeautifulSoup(html, 'html.parser')
	result_list = []

	uls = soup.select('ul[id="performList"]')
	if len(uls) != 1:
		# assert is stripped under `python -O`; raise a real error instead.
		raise ValueError('expected exactly one ul#performList, got %d' % len(uls))

	for item in uls[0].find_all('li'):
		result = {}

		anchors = item.find_all('a')
		result['url'] = base_url + anchors[0]['href']
		result['title'] = anchors[0]['title']

		# Strip the Chinese "time:" / "venue:" label prefixes.
		dates = item.select('dd[class="date"]')
		result['date'] = dates[0].get_text().replace('时间：', '')

		places = item.select('dd[class="venue"]')
		result['place'] = places[0].get_text().replace('场馆：', '')

		# NOTE(review): key spelling "avaiable" (sic) kept for backward
		# compatibility with any existing consumers of these dicts.
		result['lowest_avaiable_price'] = get_lowest_price(result['url'])
		result_list.append(result)
		print(result)

	return result_list

def get_concerts(city):
	"""Download and parse the concert listing for a city.

	Args:
		city: damai.cn city path code, e.g. 'sh' for Shanghai. The
			original implementation ignored this parameter and always
			fetched Shanghai; it is now used to build the URL, which is
			backward compatible for callers already passing 'sh'.

	Returns:
		The list of event dicts produced by parse_html().
	"""
	# Reuse the module-level base_url instead of re-spelling the host.
	url = base_url + urllib.parse.quote(city) + "/" + urllib.parse.quote("Perform-1")

	# Context manager guarantees the HTTP response is closed.
	with urllib.request.urlopen(url) as res:
		html = res.read().decode()

	return parse_html(html)
