# coding=utf-8
import urllib
import urllib2
import MySQLdb
import time
from sgmllib import SGMLParser


def db_connection(**kwargs):
    """Open a MySQL connection, with sane defaults for every setting.

    Keyword Args:
        user (str): MySQL user, default 'root'.
        password (str): password, default 'root'.
        host (str): server host, default '127.0.0.1'.
        port (int or str): server port, default 3306.
        db (str or None): database to select on connect; default None
            (no database selected).
        charset (str): connection charset, default 'utf8'.

    Returns:
        An open MySQLdb connection object.
    """
    user = kwargs.get('user', 'root')
    password = kwargs.get('password', 'root')
    host = kwargs.get('host', '127.0.0.1')
    # MySQLdb requires an int port; accept str ('3306') or int for convenience.
    port = int(kwargs.get('port', 3306))
    db = kwargs.get('db', None)
    charset = kwargs.get('charset', 'utf8')

    # Bug fix: `port` and `db` were previously read but never passed to
    # connect(), so non-default values were silently ignored.
    connect_kwargs = dict(user=user, passwd=password, host=host,
                          port=port, charset=charset)
    if db is not None:
        connect_kwargs['db'] = db
    conn = MySQLdb.connect(**connect_kwargs)

    return conn


class URLLister(SGMLParser):
    """SGML parser that collects the href of every <a> tag it is fed.

    After feed()/close(), the gathered links are available in `self.urls`
    in document order.
    """

    def reset(self):
        # Re-initialize parser state and clear any previously collected links.
        SGMLParser.reset(self)
        self.urls = []

    def start_a(self, attrs):
        # attrs is a list of (name, value) pairs; keep every href value.
        for name, value in attrs:
            if name == 'href':
                self.urls.append(value)



if __name__ == '__main__':
    sourcebase = 'http://yuedukong.com/archive'
    # sourcebase = 'http://www.zilihangjian.net/archive'
    agent = 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2062.120 Safari/537.36'

    conn = db_connection()
    cursor = conn.cursor()

    try:
        conn.select_db("test")
    except:
        cursor.execute("CREATE DATABASE IF NOT EXISTS test")
    # cursor.execute("DROP TABLE IF EXISTS urls")
    # cursor.execute("CREATE TABLE IF NOT EXISTS urls(id int(11) AUTO_INCREMENT, url varchar(255),PRIMARY KEY (id))ENGINE=InnoDB DEFAULT CHARSET=utf8")

    # conn.commit() # 提交


    i = 0
    urls = []
    for year in range(2011, 2015):
        for month in range(01, 13):
            post_data = 'lite=1&month=' + str(year) + str(month)

            request = urllib2.Request(sourcebase, post_data)
            request.add_header('User-Agent', agent)

            opener = urllib2.build_opener()
            html_data = opener.open(request).read().decode('utf-8')

            parser = URLLister()
            parser.feed(html_data)
            parser.close()


            if i == 3:
                i = 0
                break
            if not len(parser.urls):
                i += 1
                continue
            time.sleep(1)
            print u'%s年%s月:' % (year, month)
            for url in parser.urls:
                # u = url.split('//')[1].split('/')
                # if u[0] != 'yuedukong.com' or len(u[-1]) != 8:
                    # continue
                urls.append(url.split('//')[1])


    for url in urls:
        print '.',
        cursor.execute("insert into urls(url) values('%s')" % url)
        conn.commit() # 提交

    cursor.close()
    conn.close()
