import scrapy
import os
import re
import json


from utils.timestamp import create_timestamp
from utils.url import create_shop_detail_json_url
from utils.actuator import actuator
from utils.conf_read import Conf_Read

# from utils.print import startPrint

from pp.log import create_log_filename
from pp.items import Shop as ShopItem


def print_http_info(response):
  """Print a short banner showing the response URL and HTTP status code."""
  banner = '\n'.join((
    '',
    '    --------------',
    '    url: {0}'.format(response.url),
    '    status: {0}'.format(response.status),
    '    --------------',
    '    ',
  ))
  print(banner)

def get_conf_path(file_name):
  """Return the path of *file_name* located next to this module's file."""
  return os.path.join(os.path.dirname(__file__), file_name)

class Shop(scrapy.Spider):
  """Spider that crawls shop-detail JSONP endpoints over a range of shop ids.

  The range starts at ``current_shop_id`` loaded from ``shop_settings.yaml``
  (next to this module) and runs up to a target id typed on stdin; crawl
  progress is written back to the conf file when the spider closes.
  """

  name = 'shop'

  # Shared progress tracker; ``conf['current_shop_id']`` is mutated while
  # crawling and persisted by closed().
  _conf_read = Conf_Read(get_conf_path('shop_settings.yaml'))
  
  custom_settings = {
    'LOG_FILE': create_log_filename('shop'),
    'LOG_LEVEL': 'WARNING',
    'ITEM_PIPELINES': {
      'pp.pipelines.ShopSavePipeline': 300,
    },
    'DOWNLOADER_MIDDLEWARES': {
      # 'pp.middlewares.GetHttpStatus': 543,
    }
  }

  def start_requests(self):
    """Yield one request per shop id, from ``current_shop_id`` up to the
    target id read from stdin (inclusive)."""
    _common_conf = self._conf_read.conf
    
    # Prompt (Chinese): "current ID {} / target ID" — the user then types
    # the target shop id on stdin.
    print('当前ID {} / 目标ID'.format(_common_conf['current_shop_id']))
    _target_shop_id = int(input()) 
    
    while _common_conf['current_shop_id'] <= _target_shop_id:
      _url = create_shop_detail_json_url(_common_conf['current_shop_id'])
      yield scrapy.Request(url = _url, callback = self.parse)
      # Mutating the shared conf dict lets closed() persist progress even
      # if the run is interrupted partway through the range.
      _common_conf['current_shop_id'] += 1

  
  def parse(self, response):
    """Unwrap the ``shopinfo(...)`` JSONP payload and yield a ShopItem,
    or yield None when the payload carries no 'shopName' key."""
    print_http_info(response)

    # Extract the JSON body from the JSONP wrapper: shopinfo({...}).
    # NOTE(review): res_str[0] raises IndexError when the pattern does not
    # match — confirm the endpoint always responds with the wrapper.
    res_str = re.findall(r'shopinfo\((.*)\)', response.text, re.S)
    hasErr, res_json = actuator(json.loads)(res_str[0])
    
    # NOTE(review): despite the name, False is treated as the error case —
    # presumably actuator() returns a success flag first; verify against
    # utils.actuator before changing this condition.
    if(hasErr == False):
      print('ERROR: ', res_json)
      return

    _shop = None
    
    if('shopName' in res_json.keys()):

      # Missing fields fall back to '' (or 0 for shop_type); 'area' is not
      # present in the payload and is always stored empty.
      _shop = ShopItem(
        shop_name = res_json.get('shopName', ''),
        shop_id = res_json.get('shopId', ''),
        shop_url = res_json.get('indexUrl', ''),
        shop_log = res_json.get('logoUrlBig', ''),
        shop_type = res_json.get('shopType', 0),
        
        company_telphone = res_json.get('telPhone', ''),
        company_address = res_json.get('companyAddress', ''),
        company_name = res_json.get('companyName', ''),

        country = res_json.get('countryName', ''),
        province = res_json.get('companyProvince', ''),
        city = res_json.get('companyCity', ''),
        area = '',

        create_time = create_timestamp(),
      )

      print('>>>', _shop['shop_name'])

    yield _shop


  # Persist the last crawled shop_id back to shop_settings.yaml.
  def closed(self, status):
    """Scrapy close hook: flush the in-memory conf (crawl progress) to disk."""
    print('cache: ', self._conf_read.conf)
    self._conf_read.updateConf()
    
# current_shop_id: 70300001
