import os
from bs4 import BeautifulSoup
import requests
from twilio.rest import Client
import datetime
import re
import logging  
import sys
import codecs
import traceback 
'''Weather class and its methods.'''

class Weather():
	"""Weather report scraped from weather.com.cn (station 101020100, Shanghai).

	Attributes hold 'null' placeholders; GetWebWeather() fetches and returns
	the current forecast text (it does not update the attributes itself).
	"""

	def __init__(self):
		# Placeholder values until a fetch is performed.
		self.timestamp = 'null'
		self.weather = 'null'
		# NOTE: 'tempreture' is misspelled but kept as-is — external callers
		# may already read this attribute name.
		self.tempreture = 'null'
		self.wind_force = 'null'
		self.wind_direction = 'null'

	def GetWebWeather(self):
		"""Fetch today's and tomorrow's forecast and return it as text.

		Returns:
			str: a formatted multi-line weather summary on success, or a
			string starting with 'parse failed:' containing the traceback
			when fetching/parsing fails.
		"""
		# Log to a file. basicConfig only takes effect on the first call in
		# a process; later calls are harmless no-ops. The old manual
		# open()/close() of the same file was a redundant second handle and
		# has been removed — logging owns the file.
		logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(message)s', filename='log.txt', filemode='a')
		url = 'http://www.weather.com.cn/weather1d/101020100.shtml#search'
		print('Extracting---')
		try:
			# timeout= prevents an unresponsive server from hanging forever.
			r = requests.get(url, timeout=10)
			r.encoding = 'utf-8'  # page is utf-8; requests may guess wrong
			soup = BeautifulSoup(r.text, "html.parser")
			# Raw string avoids invalid-escape warnings for \d; string=
			# replaces the deprecated text= keyword of find_all().
			elems = soup.find_all(string=re.compile(r"\d\d:\d\d更新"))
			timestamp = elems[0]

			# Two <p class="wea"> entries: today's and tomorrow's weather.
			elems = soup.select('ul p[class="wea"]')
			weather1 = elems[0].getText()
			weather2 = elems[1].getText()

			elems = soup.select('ul p[class="tem"] span')
			tempreture1 = elems[0].getText()
			tempreture2 = elems[1].getText()

			# Wind: text is the force, the title attribute is the direction.
			elems = soup.select('ul p[class="win"] span')
			wind_force1 = elems[0].getText()
			wind_direction1 = elems[0].get('title')
			wind_force2 = elems[1].getText()
			wind_direction2 = elems[1].get('title')

			txt = ('update @' + timestamp + '\n'
				'today->weather:' + weather1 + '\n'
				'>tempreture:' + tempreture1 + '\n'
				'>wind:' + wind_direction1 + wind_force1 + '\n'
				'tomorrow->weather:' + weather2 + '\n'
				'>tempreture:' + tempreture2 + '\n'
				'wind:' + wind_direction2 + wind_force2)
			logging.debug(txt)
			return txt

		except Exception:
			# Best-effort contract: never raise to the caller; log the
			# traceback and return it as the result string instead.
			e_msg = traceback.format_exc()
			logging.debug(f'parse failed:{e_msg}')
			return f'parse failed:{e_msg}'

