import hashlib
from datetime import *

from sqlalchemy import Column, String, create_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy.sql import func
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.dialects.mysql import \
        BIGINT, BINARY, BIT, BLOB, BOOLEAN, CHAR, DATE, \
        DATETIME, DECIMAL, DECIMAL, DOUBLE, ENUM, FLOAT, INTEGER, \
        LONGBLOB, LONGTEXT, MEDIUMBLOB, MEDIUMINT, MEDIUMTEXT, NCHAR, \
        NUMERIC, NVARCHAR, REAL, SET, SMALLINT, TEXT, TIME, TIMESTAMP, \
        TINYBLOB, TINYINT, TINYTEXT, VARBINARY, VARCHAR, YEAR


# Database-related operations: declarative base shared by all ORM models below.
Base = declarative_base()


class Table(Base):
    """ORM model for crawled Stack Overflow pages (raw HTML plus metadata)."""

    # Table name:
    __tablename__ = 'stackoverflow_html_detail'

    # Table columns:
    id = Column(INTEGER, primary_key=True)     # surrogate primary key
    url = Column(String(255), nullable=False)  # page URL that was crawled
    html = Column(MEDIUMTEXT)                  # raw HTML body of the page
    crawledTime = Column(DATETIME)             # when the page was fetched
    urlMD5 = Column(String(255))               # presumably the MD5 hex digest of `url` (for dedup) — TODO confirm against the writer
    pageMD5 = Column(String(255))              # presumably the MD5 hex digest of the page content — TODO confirm
    history = Column(String(255))              # purpose unclear from this file — verify against callers


class Pointers(Base):
    """ORM model for per-table crawl progress markers.

    The commented example in ``__main__`` reads ``pointer`` for a given
    ``table_name`` and increments it after processing one row, so each row
    appears to track the last-processed id of another table.
    """
    __tablename__ = 'pointers'
    id = Column(INTEGER, primary_key=True)
    table_name = Column(String(255))  # name of the table this pointer tracks (e.g. 'stackoverflow_url')
    pointer = Column(INTEGER)         # last processed row id in that table
    created_at = Column(DATETIME)


class Url(Base):
    """ORM model for the queue of Stack Overflow URLs to be crawled."""

    # Table name:
    __tablename__ = 'stackoverflow_url'

    # Table columns:
    id = Column(INTEGER, primary_key=True)     # surrogate primary key; consumed sequentially via Pointers
    url = Column(String(255), nullable=False)  # URL to crawl
    timestamp = Column(BIGINT)                 # numeric timestamp; units (s/ms) not evident here — TODO confirm
    extractedTime = Column(DATETIME)           # when the URL was extracted


# Engine / session factory setup.
# NOTE(review): credentials are hard-coded in the URL — move them to
# configuration or environment variables before deploying.
# NOTE(review): importing this module has side effects — it configures a
# MySQL engine and create_all() issues CREATE TABLE for any missing tables.
engine = create_engine('mysql+mysqlconnector://root:root@localhost:3306/crawler') 
DBSession = sessionmaker(bind=engine)
Base.metadata.create_all(engine)


if __name__ == '__main__':
    # Example usage (kept for reference): fetch the next URL to crawl by
    # reading the 'stackoverflow_url' pointer and advancing it by one row.
    # session = DBSession()
    # pointer = session.query(Pointers.pointer).filter(Pointers.table_name=='stackoverflow_url').one()[0]
    # url_num = session.query(func.max(Url.id)).one()[0]
    # url = session.query(Url.url).filter(Url.id==pointer+1).one()[0]
    # session.query(Pointers).filter(Pointers.table_name=='stackoverflow_url').update({Pointers.pointer:pointer+1})
    # session.commit()
    #
    # BUG FIX: the original suite contained only comments, which is a syntax
    # error ("expected an indented block") that made the whole module
    # unimportable. `pass` keeps the module loadable while the example above
    # stays commented out. Indentation normalized from tabs to 4 spaces to
    # match the rest of the file.
    pass