#-*- coding:utf-8 -*-

from BeautifulSoup import BeautifulSoup
import gzip
from cStringIO import StringIO
import smtplib
from email.mime.text import MIMEText
from email.header import Header
import time
# Legacy timestamp format ('%X' = locale-specific time); currently unused —
# send_main() builds its own strftime format string inline.
ISOTIMEFORMAT='%Y-%m-%d %X'

__author__ = 'liyuan_t'

import requests
# Module-level accumulator of [title, url] pairs, filled by crawler()
# and returned by run(); cleared only by restarting the process.
content_list = []

def get_page(url=''):
    """Fetch *url* and return the response body as bytes, or None on failure.

    requests transparently decompresses ``Content-Encoding: gzip/deflate``
    responses, so the returned value is always the plain page content.
    """
    headers = {
        'Accept-Encoding': 'gzip, deflate, sdch',
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/46.0.2490.80 Safari/537.36',
        # Bug fix: the value previously started with a duplicated
        # "Accept:" prefix, producing an invalid header value.
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
        'Accept-Language': 'zh-CN,zh;q=0.8'
    }
    try:
        # Bug fix: the headers dict was built but never sent.  Also bound
        # the request so a dead server cannot hang the crawler forever.
        response = requests.get(url, headers=headers, timeout=30)
    except Exception as e:
        print(e)
        return None
    # The old branch tested ``response.encoding == 'gzip'`` — but .encoding
    # is the *charset* (e.g. 'utf-8'), never a content-encoding, so the
    # manual GzipFile path was dead code.  requests has already decoded
    # any gzip transfer encoding by the time .content is read.
    return response.content


def get_news():
    """Download the campus news listing page ("type=1" feed)."""
    url = 'http://news.cqut.edu.cn/Home/Type?type=1'
    return get_page(url)

def get_notices():
    """Download the notices listing page (module GUID feed)."""
    url = 'http://news.cqut.edu.cn/Home/Module/6f09b7e2-e71f-44b5-9d33-d2d09bfdbb25'
    return get_page(url)

def get_bumen():
    """Download the department ("bumen") announcements listing page."""
    url = 'http://news.cqut.edu.cn/Home/Module/e613ac6a-dc54-45f9-aab1-e9dfcfc528b0'
    return get_page(url)

def get_jiaowu():
    """Download the academic-affairs ("jiaowu") department listing page."""
    url = 'http://news.cqut.edu.cn/Home/Department/7aed4134-97ae-4bcc-b1fe-a58d227b097c'
    return get_page(url)

def get_keyan():
    """Download the research-office ("keyan") department listing page."""
    url = 'http://news.cqut.edu.cn/Home/Department/2af1a233-f1a7-4bea-ac5d-20483d221249'
    return get_page(url)

def get_notice2():
    """Download the secondary notices listing page (module GUID feed)."""
    url = 'http://news.cqut.edu.cn/Home/Module/74DD1D56-78CF-4B6E-938B-B3DB996758E6'
    return get_page(url)


def crawler(content):
    """Parse one listing page and append [title, absolute-url] pairs.

    Results accumulate in the module-level ``content_list``.

    :param content: raw HTML bytes of a listing page (UTF-8 encoded —
        TODO confirm against the live site's charset).
    """
    soup = BeautifulSoup(content.decode('utf-8'))
    div = soup.find('div', {'class': 'list', 'style': 'z-index: 500;'})
    # Bug fix: when the layout changes or an error page comes back,
    # find() returns None and the loop below would raise AttributeError.
    if div is None:
        return
    for a in div.findAll('a', {'target': '_blank'}):
        try:
            content_list.append([a['title'], 'http://news.cqut.edu.cn' + a['href']])
        except KeyError as e:
            # Some anchors lack a title/href attribute; skip just those.
            print(e)


def run():
    """Fetch every configured listing page and scrape each one.

    Returns the module-level ``content_list`` with [title, url] pairs
    appended for every page that downloaded successfully.
    """
    fetchers = (get_news, get_notices, get_bumen,
                get_jiaowu, get_keyan, get_notice2)
    for fetch in fetchers:
        content = fetch()
        # Bug fix: the original only truth-checked the first page and then
        # fed all six results — possibly None — to crawler(), which would
        # crash on None.decode().  Check each page individually instead.
        if content:
            crawler(content)
    return content_list

def send_main():
    """Collect the scraped headlines and mail them as one HTML digest."""
    news_list = run()
    sender = 'sender@sender.com'
    receiver = ['to@server.com']
    subject = u'News ' + time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
    smtp_server = 'smtp.qq.com'
    username = 'sender@sender.com'
    password = 'sender_pwd'

    # Build one <a> line per headline.  join() avoids the quadratic
    # ``+=`` string build; malformed entries are skipped (best-effort),
    # preserving the original's behavior of never aborting the mail.
    parts = []
    for news in news_list:
        try:
            parts.append('<a href = "' + news[1]
                         + '" style = "text-decoration:none">'
                         + news[0] + '</a><br />')
        except Exception:
            continue
    send_str = ''.join(parts)

    msg = MIMEText(send_str.encode('GB18030'), 'html', 'GB18030')
    msg['Subject'] = Header(subject, 'utf-8')
    # Bug fix: the From header was never set; many SMTP servers reject or
    # spam-flag messages without one.
    msg['From'] = sender
    msg['To'] = ';'.join(receiver)

    smtp = smtplib.SMTP()
    smtp.connect(smtp_server, 25)
    try:
        smtp.login(username, password)
        smtp.sendmail(sender, receiver, msg.as_string())
    finally:
        # Bug fix: always close the connection, even when login or
        # sendmail raises, so the socket is not leaked.
        smtp.quit()

# Script entry point: scrape all listing pages and e-mail the headlines.
if __name__ == '__main__':
    send_main()