# -*- coding: UTF-8 -*-
import requests
import bs4
import json
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import create_engine, Column, Integer, String
from sqlalchemy.orm import sessionmaker
from config import DB_URI
#import MainUtil


# Path of the cached airport-name list file (presumably used by MainUtil; unused in the visible code).
resources_file_path = '/resources/airplane/airportNameList.ini'
# Old search endpoint, kept for reference only -- no longer scraped here.
scratch_url_old = 'https://data.variflight.com/profiles/profilesapi/search'
# Current endpoint returning the complete airport list as JSON.
scratch_url = 'https://data.variflight.com/analytics/codeapi/initialList'
# Per-airport detail page; %s is filled with an airport identifier.
get_city_url = 'https://data.variflight.com/profiles/Airports/%s'

engine = create_engine(DB_URI)
Base = declarative_base(engine)  # SQLAlchemy ORM declarative base, bound to the engine
session = sessionmaker(engine)()  # module-level session used for all inserts below


class AirPorts(Base):
    """ORM model for one airport record scraped from the variflight API."""
    __tablename__ = 'airPort'  # table name
    id = Column(String(100), primary_key=True)  # airport id as delivered by the API
    ICAO = Column(String(100))  # ICAO airport code
    en = Column(String(500))  # presumably the English name -- TODO confirm against API payload
    fn = Column(String(100))  # 'fn' field from the API (full name? verify)
    an = Column(String(100))  # 'an' field from the API
    text = Column(String(500))  # 'text' field from the API
    ct = Column(String(100))  # 'ct' field from the API (city? verify)

def scratch_airport_name(scratch_url):
    """Fetch the full airport list from *scratch_url* and persist every entry.

    The endpoint returns JSON shaped as ``{"data": {<letter>: [airport, ...]}}``;
    each airport dict carries the keys id/ICAO/en/fn/an/text/ct, which map
    one-to-one onto the AirPorts model.

    Note: rows are inserted as-is. An airport whose ``id`` already exists in
    the table will make the final commit fail with an integrity error.
    """
    response = requests.get(scratch_url)
    # Fail loudly on HTTP errors instead of trying to json-parse an error page.
    response.raise_for_status()
    all_airport_json = response.json()['data']
    for airport_group in all_airport_json.values():
        for airport in airport_group:
            session.add(AirPorts(
                id=airport['id'],
                ICAO=airport['ICAO'],
                en=airport['en'],
                fn=airport['fn'],
                an=airport['an'],
                text=airport['text'],
                ct=airport['ct'],
            ))
    # Commit once: one transaction for the whole batch instead of one per row.
    session.commit()


# Base.metadata.create_all()  # uncomment on first run to create the airPort table

if __name__ == '__main__':
    # Run the scrape only when executed as a script, not on import.
    scratch_airport_name(scratch_url)