# -*- coding: utf-8 -*-
import os     
import MySQLdb  
from scrapy.exceptions import DropItem
import sys
reload(sys)
sys.setdefaultencoding("utf-8")

# INDEX = 1
# DATE = "2015-02-26"

class ArxivTable(object):
    """MySQL persistence helper for crawled arXiv entries.

    Wraps a single MySQLdb connection to the local `overfitting` database
    and exposes insert/lookup helpers for the `ask_arxiv` and
    `ask_arxiv_crawled` tables.
    """

    def __init__(self):
        # Best-effort connect: failures are reported, not raised, matching
        # the original behavior (callers get an instance without .db/.cursor).
        try:
            self.db = MySQLdb.connect(host="127.0.0.1", user="root", passwd="", port=3306, db="overfitting", charset="utf8")
            self.cursor = self.db.cursor()
            print("Connect to db successfully!")
        # Narrowed from a bare `except:` so programming errors (NameError,
        # KeyboardInterrupt, ...) are no longer silently swallowed.
        except MySQLdb.Error:
            print("Fail to connect to db!")

    def process_item(self, item):
        """Insert one arXiv item dict into ask_arxiv.

        `item` must provide 'arxiv_no', 'arxiv_name', 'arxiv_url' and
        'create_time'. Returns whatever cursor.execute returns (affected
        row count for MySQLdb). The write is visible only after close_db()
        commits.
        """
        param = (item['arxiv_no'], item['arxiv_name'], item['arxiv_url'], item['create_time'])
        sql = "insert into ask_arxiv (arxiv_no,arxiv_name,arxiv_url,create_time) values(%s,%s,%s,%s)"
        return self.cursor.execute(sql, param)

    def has_crawled(self, timestamp):
        """Return the number of ask_arxiv_crawled rows matching `timestamp`.

        Non-zero (truthy) means this timestamp was already crawled.
        """
        # Parameterized query — was built by string concatenation, which is
        # SQL-injection-prone and inconsistent with process_item's style.
        sql = "select * from ask_arxiv_crawled where crawl_time = %s"
        return self.cursor.execute(sql, (timestamp,))

    def crawled_thetime(self, timestamp):
        """Record `timestamp` in ask_arxiv_crawled as having been crawled."""
        # Parameterized for the same reason as has_crawled.
        sql = "insert into ask_arxiv_crawled (crawl_time) values (%s)"
        return self.cursor.execute(sql, (timestamp,))

    def close_db(self):
        """Commit pending writes and close the connection."""
        self.db.commit()
        # Bug fix: was `self.db.close` — attribute access only, so the
        # connection was never actually closed.
        self.db.close()
        print("Done")