# from db_orm.db_orm import DB
from pp.common import SU_NING_DB
from pp.db_model import ShopModel
from pp.db_model import ProductModel


# Save a newly scraped company
class CompanySavePipeline:
  """Persist the company object carried in item['body']['company'].

  Scrapy item pipeline: `process_item` must return the item so later
  pipelines in the chain still receive it.
  """

  def open_spider(self, spider):
    # Project-level engine/session factory (configured in pp.common).
    self._db = SU_NING_DB

  def process_item(self, d, spider):
    """Insert the company row and pass the item through unchanged.

    Tolerates a missing 'body' or 'company' key (no-op in that case).
    Rolls back and re-raises on DB errors; the session is always closed.
    """
    if d is None:
      return d

    body = d.get('body') or {}
    company = body.get('company')
    if company is None:
      return d

    session = self._db.DBSession()
    try:
      session.add(company)
      # session.flush() here would populate the auto-generated PK
      # before commit, if the new id is ever needed.
      session.commit()
    except Exception:
      session.rollback()
      raise
    finally:
      # Close unconditionally so sessions are not leaked on errors.
      session.close()

    return d


# Save new shop information
class ShopSavePipeline:
  """Persist a scraped shop dict as a new ShopModel row."""

  def open_spider(self, spider):
    # Project-level engine/session factory (configured in pp.common).
    self._db = SU_NING_DB

  def process_item(self, shop, spider):
    """Build a ShopModel from *shop* and insert it; returns the item.

    Raises KeyError if a required field is missing from the item.
    Rolls back and re-raises on DB errors; the session is always closed.
    """
    if shop is None:
      return shop

    shop_row = ShopModel(
      shop_name=shop['shop_name'],
      shop_id=shop['shop_id'],
      shop_url=shop['shop_url'],
      # NOTE(review): 'shop_log' is likely a typo for 'shop_logo' —
      # kept as-is to match the ShopModel column; confirm in db_model.
      shop_log=shop['shop_log'],
      shop_type=shop['shop_type'],

      company_telphone=shop['company_telphone'],
      company_address=shop['company_address'],
      company_name=shop['company_name'],

      country=shop['country'],
      province=shop['province'],
      city=shop['city'],
      area=shop['area'],

      create_time=shop['create_time'],
    )

    session = self._db.DBSession()
    try:
      session.add(shop_row)
      session.commit()
    except Exception:
      session.rollback()
      raise
    finally:
      # Close unconditionally so sessions are not leaked on errors.
      session.close()

    return shop

# Update existing shop information
class ShopUpdatePipeline:
  """Update an existing ShopModel row from the item's 'body' payload."""

  def open_spider(self, spider):
    # Project-level engine/session factory (configured in pp.common).
    self._db = SU_NING_DB

  def process_item(self, pk, spider):
    """Apply pk['body'] as an UPDATE keyed on the shop id.

    Returns the shop payload (original behavior); returns None when the
    item carries no body, which drops the item from the pipeline chain.
    """
    shop = pk.get('body')
    if shop is None:
      return None

    values = dict(shop)
    # Use the spider's logger instead of bare print() debug output.
    spider.logger.debug('updating shop: %s', values)

    session = self._db.DBSession()
    try:
      # dict(shop) above shows *shop* behaves as a mapping, so the id is
      # read with item access (shop['id']) rather than attribute access.
      session.query(ShopModel).filter(
        ShopModel.id == shop['id']
      ).update(values)
      session.commit()
    except Exception:
      session.rollback()
      raise
    finally:
      # Close unconditionally so sessions are not leaked on errors.
      session.close()

    return shop


# Save product information
class ProductSavePipeline:
  """Bulk-insert the product rows carried in item['list']."""

  def open_spider(self, spider):
    # Project-level engine/session factory (configured in pp.common).
    self._db = SU_NING_DB

  def process_item(self, product_list, spider):
    """Add every entry of product_list['list'] in one session.

    Returns the item unchanged so later pipelines still receive it.
    Skips the DB round-trip entirely when the list is empty or missing.
    Rolls back and re-raises on DB errors; the session is always closed.
    """
    if product_list is None:
      return product_list

    rows = product_list.get('list', [])
    if rows:
      session = self._db.DBSession()
      try:
        session.add_all(rows)
        session.commit()
      except Exception:
        session.rollback()
        raise
      finally:
        # Close unconditionally so sessions are not leaked on errors.
        session.close()

    return product_list


# import scrapy
# import hashlib
# from urllib.parse import quote
# from itemadapter import ItemAdapter
# class PictrueDownLoadPipeline:
  
#   async def process_item(self, item, spider):

#       _url = item.get('url')
#       encoded_item_url = quote(_url)
#       request = scrapy.Request(encoded_item_url)
#       response = await spider.crawler.engine.download(request, spider)
#       if response.status != 200:
#         return item

#       url_hash = hashlib.md5(_url.encode("utf8")).hexdigest()
#       filename = f"{url_hash}.png"
#       with open(filename, "wb") as f:
#         f.write(response.body)

#       # Store filename in item.
#       print('filename: ', filename)
#       return item
