#!/usr/bin/python
# -*- coding: utf-8 -*-

#$ -l h_rt=0:30:00
#$ -l virtual_free=100M
#$ -o $HOME/weather.out
#$ -e $HOME/weather.err
#$ -N weather

import sys
reload(sys)	# re-import sys: site.py deletes setdefaultencoding at startup, reload restores it
sys.path.append('/home/artem/pywikipedia')	# location of the pywikipediabot framework
#sys.path.append('/home/artem/Dropbox/wiki/bot/pywikipedia')
sys.setdefaultencoding('utf-8')	# Python 2 hack: make implicit str<->unicode conversions use utf-8

import re # regular expressions

import wikipedia # pywikipediabot

import calendar # dates
import time # for sleep() function

# gets an xml-file with forecast information via World Weather Online API
def download_xml_wwo(city, f):
	import urllib2
	
	url = 'http://api.worldweatheronline.com/free/v1/weather.ashx?q=' + city + '&format=xml&num_of_days=5&key=drddq75q5yzry5snurxpf3tx'
	
	try:
		page = urllib2.urlopen(url)
	except urllib2.HTTPError, e:
		print 'Ошибка при получении данных для города ' + city + '\nurllib2.HTTPError: ' + str(e.code)
	except urllib2.URLError, e:
		print 'Ошибка при получении данных для города ' + city + '\nurllib2.HTTPError: ' + str(e.reason)

	f.write(page.read())

def download_xml_google(city, f):
	import urllib2
	
	url = 'http://www.google.com/ig/api?weather=' + city + '&oe=utf8&hl=ru'
	
	while(True):
		try:
			page = urllib2.urlopen(url)
			break
		except urllib2.HTTPError, e:
			print 'Ошибка при получении данных для города ' + city + '\nurllib2.HTTPError: ' + str(e.code)

	f.write(page.read())

# gets cities list from wikinews pages
def get_cities():
	cities = []
	cities_rus = []

	page = wikipedia.Page(site, u'Шаблон:ПогодаН/Список городов')
	page_text = page.get()
	cities = re.findall('(?<=\|).+', page_text)
	cities_rus = re.findall('.+(?=\|)', page_text)

	return [cities, cities_rus]

# gets old data from Wikinews page, needed if new data are unavailable
def get_old_data(city):
	page = wikipedia.Page(site, u'Module:Weather/data')
	txt = re.findall(u'(?<=\[\''+city+u'\'\] = {).+?(?=},)', page.get(), flags=re.DOTALL)[0]
	print txt
	return [re.findall(u'(?<=temperature = ).+(?=,)', txt), re.findall(u'(?<=conditions = ).+(?=,)', txt), re.findall(u'(?<=maxtemp0 = ).+(?=,)', txt), re.findall(u'(?<=mintemp0 = ).+(?=,)', txt), re.findall(u'(?<=maxtemp1 = ).+(?=,)', txt), re.findall(u'(?<=mintemp1 = ).+(?=,)', txt), re.findall(u'(?<=maxtemp2 = ).+(?=,)', txt), re.findall(u'(?<=mintemp2 = ).+(?=,)', txt), re.findall(u'(?<=maxtemp3 = ).+(?=,)', txt), re.findall(u'(?<=mintemp3 = ).+(?=,)', txt)]

# gets new data from World Weather Online API
def get_data_wwo(cities):
	temperature = []
	conditions = []
	highs = [[], [], [], []]
	lows = [[], [], [], []]
	
	for i, city in enumerate(cities):
		print u'Получение новых данных для города ' + city
		time.sleep(0.4)
		
		# get an xml-file with forecast information via World Weather Online API
		f = open('/tmp/wfbtmp', 'w')
		download_xml_wwo(city.replace(' ', '+').encode('utf8'), f)
		f.close()
		
		import xml.etree.ElementTree as etree
		
		# parsing xml-file with ElementTree
		f = open('/tmp/wfbtmp', 'r')
		try:
			tree = etree.parse(f)

			for child in tree.getroot()[1]:
				if child.tag == 'weatherDesc':
					conditions.append(child.text)
				elif child.tag == 'temp_C':
					temperature.append(child.text)
			
			for d in range(4):
				for child in tree.getroot()[d+2]:
					if child.tag == 'tempMinC':
						lows[d].append(child.text)
					elif child.tag == 'tempMaxC':
						highs[d].append(child.text)
		except IndexError as e: # in the case of an error, use old data
			print 'Ошибка при получении или обработке данных для города ' + city  + '\nИспользую старые данные'
			old_data = get_old_data(cities_rus[i])
			conditions.append(old_data[0])
			temperature.append(old_data[1])
			for d in range(4):
				lows[d].append(old_data[2+2*d])
				highs[d].append(old_data[3+2*d])
		
		f.close()
	
	return [temperature, conditions, highs, lows]	

# gets new data from Google Weather API
def get_data_google(cities):
	temperature = []
	conditions = []
	highs = [[], [], [], []]
	lows = [[], [], [], []]
	for i, city in enumerate(cities):
#		print u'Получение новых данных для города ' + city
		
		# get an xml-file with forecast information via Google Weather API
		f = open('/tmp/wfbtmp', 'w')
		download_xml_google(city.replace(' ', '+').encode('utf8'), f)
		f.close()

		import xml.etree.ElementTree as etree

		# parsing xml-file with ElementTree
		f = open('/tmp/wfbtmp', 'r')
		try:
			tree = etree.parse(f)

			for child in tree.getroot()[0][1]:
				if child.tag == 'condition':
					conditions.append(child.attrib['data'])
				elif child.tag == 'temp_c':
					temperature.append(child.attrib['data'])
			
			for d in range(4):
				for child in tree.getroot()[0][d+2]:
					if child.tag == 'low':
						lows[d].append(child.attrib['data'])
					elif child.tag == 'high':
						highs[d].append(child.attrib['data'])
		except (etree.ParseError, IndexError) as e: # in the case of an error, use old data
			print 'Ошибка при получении или обработке данных для города ' + city + '\n' + str(e.code) + '\nИспользую старые данные'
			old_data = get_old_data(cities_rus[i])
			conditions.append(old_data[0])
			temperature.append(old_data[1])
			for d in range(4):
				lows[d].append(old_data[2+2*d])
				highs[d].append(old_data[3+2*d])			
		
		f.close()
		
	return [temperature, conditions, highs, lows]
		
# writes new data to Wikinews
def write_to_wn(cities, temperature, conditions, highs, lows):
	content = u'local p = {}\n\np = {\n'
	
	for i, city in enumerate(cities):
		content += u'\t[\'' + city + '\']' + u' = {\n'
		content += u'\t\ttemperature = ' + temperature[i] + ',\n'
		content += u'\t\tconditions = \'' + conditions[i] + '\',\n'
		for d in range(4):
			content += u'\t\tmaxtemp' + str(d) + ' = ' + highs[d][i] + ',\n'
			content += u'\t\tmintemp' + str(d) + ' = ' + lows[d][i] + ',\n'
		content += u'\t},\n'
	
	content += '}\n\nreturn p'
	
	wikipedia.setAction(u'Обновление прогноза погоды')
	page = wikipedia.Page(site, u'Module:Weather/data')
	page.put(content)


# --- main script body ---

site = wikipedia.getSite()	# target wiki, taken from the pywikipedia user config

# Module-level result containers. NOTE: cities_rus is also read as a global
# inside get_data_wwo()/get_data_google() for the old-data fallback, so it
# must stay in the same order as cities.
cities = []
cities_rus = []
temperature = []
lows = [[], [], [], []]
highs = [[], [], [], []]
conditions = []

[cities, cities_rus] = get_cities()
#[cities, cities_rus] = [["Moscow"], ["Москва"]]
#for i, city in enumerate(cities):
#	wikipedia.output(city + u' ' + cities_rus[i])

# Fetch new data (World Weather Online backend; the Google one is unused).
[temperature, conditions, highs, lows] = get_data_wwo(cities)

# Log everything that was fetched before publishing it.
for i, city in enumerate(cities):
	wikipedia.output(city + ' ' + temperature[i] + ' ' + conditions[i] + ' ' + highs[0][i] + ' ' + lows[0][i] + ' ' + highs[1][i] + ' ' + lows[1][i] + ' ' + highs[2][i] + ' ' + lows[2][i] + ' ' + highs[3][i] + ' ' + lows[3][i])

write_to_wn(cities_rus, temperature, conditions, highs, lows)

wikipedia.stopme()	# release the pywikipedia edit throttle
