'''
Created on 2012. 11. 12.

@author: jm.kong
'''
from bs4 import BeautifulSoup
import MySQLdb
import Queue
import threading
import time
import urllib2
import gc


#seqs = [i for i in range(512798, 512800 + 1)]
seqs = [i for i in range(512798, 561059 + 1)]

seq_queue = Queue.Queue()
html_queue = Queue.Queue()
sql_queue = Queue.Queue()
        

class ThreadUrl(threading.Thread):
    '''Threaded URL grab'''
    def __init__(self, queue, out_queue):
        threading.Thread.__init__(self)
        self.queue = queue
        self.out_queue = out_queue   
    
    def run(self):
        while True:
            # Get URL from queue
            seq = self.queue.get()
            url = 'http://www.hani.co.kr/arti/%d.html' % seq            
            
            try:
                # Read web page using URL            
                page = urllib2.urlopen(url, timeout=10)
                chunk = page.read()
                        
                # Put the web page into queue
                self.out_queue.put((seq, chunk))
            except Exception as e:
                print e
            
            finally:
                # Send signal to queue to notify job is finished            
                self.queue.task_done()
                    
            
class ThreadParser(threading.Thread):
    '''Parser thread: extracts date, category, title and body text from a
    fetched article page and emits an INSERT statement into ``out_queue``.
    Pages lacking the expected structure are silently skipped.'''
    def __init__(self, queue, out_queue):
        threading.Thread.__init__(self)
        self.queue = queue          # Queue of (seq, html_bytes) tuples
        self.out_queue = out_queue  # Queue of SQL statement strings

    def run(self):
        while True:
            # Block until a fetched page is available.
            (seq, chunk) = self.queue.get()

            try:
                soup = BeautifulSoup(chunk)

                # Publication date: slice [5:15] of the <span> text gives
                # "YYYY-MM-DD"; month/day are then sub-sliced from that.
                date_area = soup.find('p', attrs={'class': 'date'})
                date = date_area.span.get_text()[5:15]
                month = date[5:7]
                day = date[8:10]

                # Category name lives in the <strong> of the title box.
                article = soup.find('div', attrs={'class': 'article-category-title'})
                cate = article.find('strong').get_text()

                # Title: the second <h3>; extract() detaches child tags so
                # get_text() returns only the flattened text.
                article = article.find_all('h3')[1]
                [text.extract() for text in article()]
                title = article.get_text().replace("'", "''")

                # Body text, with the same child-tag stripping treatment.
                article = soup.find('div', attrs={'class': 'article-contents'})
                [text.extract() for text in article()]
                contents = article.get_text().replace("'", "''")

                # SECURITY NOTE(review): SQL is built by string formatting
                # with only quote-doubling as escaping.  ThreadSql executes
                # plain strings, so moving to parameterized queries means
                # changing both ends of sql_queue - flagged, not changed.
                sql = "insert into article(seq, month, day, cate, title, contents) values (%d, '%s', '%s', '%s', '%s', '%s')" % \
                (seq, month.encode('utf-8'), day.encode('utf-8'), cate.encode('utf-8'), title.encode('utf-8'), contents.encode('utf-8'))

                self.out_queue.put(sql)

            except Exception:
                # Malformed/missing pages are skipped on purpose.  This was
                # a bare "except:", which also swallowed SystemExit and
                # KeyboardInterrupt - narrowed to Exception.
                pass

            finally:
                # Always signal completion so html_queue.join() can return.
                self.queue.task_done()
              
                            
class ThreadSql(threading.Thread):
    def __init__(self, queue):
        threading.Thread.__init__(self)
        self.queue = queue
        self.db = MySQLdb.connect('127.0.0.1', 'root', 'cjswoek', 'trends', charset='utf8', use_unicode=True)
        self.cursor = self.db.cursor()
        
    def __del__(self):
        self.db.close()    
        gc.collect()
        
    def run(self):
        while True:
            sql = self.queue.get()
            try:
                self.cursor.execute(sql)
                self.db.commit()
                
            except Exception as e:
                print e                        
                self.db.rollback()
                
            finally:
                self.queue.task_done()



def main(fetchers=10, parsers=10):
    '''Run the crawl pipeline: fetch -> parse -> store.

    fetchers/parsers set the pool sizes (defaults match the original
    hard-coded 10/10).  A single DB writer is used because its MySQLdb
    connection is created per-thread and never shared.
    '''
    # Downloader pool: seq_queue -> html_queue.
    for _ in range(fetchers):
        t = ThreadUrl(seq_queue, html_queue)
        t.setDaemon(True)  # daemonize so Ctrl-C/exit is not blocked
        t.start()

    # Parser pool: html_queue -> sql_queue.
    for _ in range(parsers):
        pt = ThreadParser(html_queue, sql_queue)
        pt.setDaemon(True)
        pt.start()

    # Single DB writer: sql_queue -> MySQL.
    st = ThreadSql(sql_queue)
    st.setDaemon(True)
    st.start()

    # Feed the pipeline with every article sequence number.
    for seq in seqs:
        seq_queue.put(seq)

    # Wait until each stage has fully drained, in pipeline order.
    seq_queue.join()
    html_queue.join()
    sql_queue.join()
    
if __name__ == '__main__':
    start = time.time()
    main()    
    print 'Elapsed Time : %s' % (time.time() - start)
