
import eurostat
import multiprocessing
import pickle
import zlib
from sqlalchemy import Column, String, create_engine,Boolean,Integer,LargeBinary
from sqlalchemy.dialects.postgresql import JSONB
from sqlalchemy.orm import sessionmaker
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()
# NOTE: the password contains '@', which must be percent-encoded as '%40' so the
# URL parser does not treat it as the user-info/host separator. With the raw '@'
# SQLAlchemy would try to connect to host 'WSX@localhost' with password '1qaz'.
engine = create_engine('postgresql://test:1qaz%40WSX@localhost:5432/eurostat')

# 定义Maping对象:
# Declarative mapping for the Eurostat table-of-contents catalogue.
class Eurostat_Table(Base):
    # Table name:
    __tablename__ = 'eurostat_table'

    # Table structure — all TOC metadata fields are stored as plain strings.
    title = Column(String(400))                   # human-readable dataset title
    code = Column(String(100), primary_key=True)  # Eurostat dataset code
    type = Column(String(100))                    # e.g. 'dataset' vs 'folder'
    # FIX: this column was accidentally declared twice; the duplicate removed.
    last_update_of_data = Column(String(100))
    last_table_structure_change = Column(String(100))
    data_start = Column(String(100))
    data_end = Column(String(100))
    # Download progress flag: 0 = pending, 1 = downloaded, 2 = insert failed.
    download_state = Column(Integer)

# Declarative mapping for the downloaded dataset payloads.
class Eurostat(Base):
    # Table name:
    __tablename__ = 'eurostats'
    code = Column(String(100), primary_key=True)  # Eurostat dataset code
    # zlib-compressed pickle of the dataset's JSON string (see fecth()).
    data = Column(LargeBinary)

def fecth(code):
    """Download one Eurostat dataset and persist it.

    Fetches the dataset identified by *code*, stores the zlib-compressed
    pickled JSON in the ``eurostats`` table, and updates the catalogue row's
    ``download_state`` (1 = success, 2 = insert failed).

    NOTE(review): the name keeps the original typo ('fecth') because the
    pool submission in ``__main__`` calls it by this name.
    """
    DBSession = sessionmaker(bind=engine)
    session = DBSession()
    # Create the row object BEFORE the try block so the except handler and
    # the persistence step below can always reference it safely.
    e_item = Eurostat()
    e_item.code = code
    try:
        print("start get " + str(code) + " ...")
        # Store the DataFrame's JSON representation, pickled and zlib-compressed.
        e_item.data = zlib.compress(pickle.dumps(eurostat.get_data_df(code, flags=False).to_json()))
        print("get " + str(code) + " success...")
    except Exception as e:
        print(e)
        print("get " + str(code) + " error...")

    et = None
    try:
        # Skip persistence entirely when the download produced no payload.
        if e_item.data is not None and len(e_item.data) > 0:
            et = session.query(Eurostat_Table).filter(Eurostat_Table.code == code).first()
            if et is not None:  # guard: catalogue row may be missing
                et.download_state = 1
            session.add(e_item)
            session.commit()
            print("insert " + str(code) + " success...")
    except Exception as e:
        print("insert " + str(code) + " error...")
        print(e)
        session.rollback()
        # FIX: the original referenced an undefined name 'item' here, which
        # raised NameError instead of marking the row as failed.
        if et is not None:
            et.download_state = 2
            session.commit()
    finally:
        # Always release the connection, whatever path we took above.
        session.close()


if __name__ == '__main__':
    # Initialise the database schema (creates the tables if missing).
    Base.metadata.create_all(engine)
    DBSession = sessionmaker(bind=engine)
    session = DBSession()

    # One-off population of the catalogue table from the Eurostat TOC.
    # Kept commented out because it only needs to run once per database.
    # toc = eurostat.get_toc()
    # for item in toc[1:]:
    #     if item[2] == 'folder':
    #         continue
    #     if 'demo' in item[1]:
    #         continue
    #     et_item = Eurostat_Table()
    #     et_item.title = item[0]
    #     et_item.code = item[1]
    #     et_item.type = item[2]
    #     et_item.last_update_of_data = item[3]
    #     et_item.last_table_structure_change = item[4]
    #     et_item.data_start = item[5]
    #     et_item.data_end = item[6]
    #     et_item.download_state = 0
    #     try:
    #         session.add(et_item)
    #         session.commit()
    #         print("insert tables " + str(et_item.code) + " success...")
    #     except Exception as e:
    #         print("insert tables " + str(et_item.code) + " error...")
    #         print(e)
    #         session.rollback()
    #     del(et_item)
    #     session.close()

    # Download every catalogued table that has not been fetched yet.
    # Materialize the code list and close the session BEFORE creating the
    # pool: forked workers must not inherit an open DB connection, and the
    # original code leaked this session (it was never closed).
    codes = [item.code for item in
             session.query(Eurostat_Table).filter(Eurostat_Table.download_state == 0).all()]
    session.close()

    p = multiprocessing.Pool(processes=4)
    for code in codes:
        p.apply_async(fecth, (code,))
    p.close()
    p.join()
    print("......finish.......")
