# -*- coding: utf-8 -*-

from pymongo import errors
from pymongo.mongo_client import MongoClient
from pymongo.mongo_replica_set_client import MongoReplicaSetClient
from pymongo.read_preferences import ReadPreference


from scrapy.exceptions import DropItem

from fifacrawl.items import PlayerIdItem, PlayerDetailItem
from fifacrawl.spiders.playerid import PlayerIdSpider

import time


class PlayIdSavePipeline(object):
  """
  Collect PlayerIdItem ids produced by a PlayerIdSpider and bulk-write
  them to MongoDB when the spider closes.

  PlayerIdItem instances are absorbed here (and dropped so later pipelines
  never see them); every other item is passed through unchanged.
  """

  # Default connection settings.  NOTE(review): currently unused -- real
  # values come from spider.settings in open_spider.  Kept for backward
  # compatibility in case external code reads it.
  config = {
      'uri': 'mongodb://localhost:27017',
      'fsync': False,
      'write_concern': 0,
      'database': 'scrapy-mongodb',
      'collection': 'items'
  }

  def __init__(self):
    # Bug fix: pidarr used to be a mutable *class* attribute, shared by
    # every pipeline instance (and across crawls in the same process).
    # It is now per-instance state.  The companion set gives O(1)
    # de-duplication instead of an O(n) list scan per item.
    self.pidarr = []
    self._seen = set()

  def open_spider(self, spider):
    """Connect to MongoDB and wipe the previous crawl's id collection."""
    if isinstance(spider, PlayerIdSpider):
      self.settings = spider.settings
      # Connecting to a stand-alone MongoDB.  fsync=True forces durable
      # writes; PRIMARY read preference matches a single-node deployment.
      connection = MongoClient(
          self.settings['MONGODB_URI'],
          fsync=True,
          read_preference=ReadPreference.PRIMARY)

      # Set up the collection
      database = connection[self.settings['MONGODB_DATABASE']]
      self.collection = database[self.settings['MONGODB_PLAYER_ID_COLLECTION']]

      # Clear old data so each crawl starts from a clean collection.
      self.collection.delete_many({})

  def process_item(self, item, spider):
    """Buffer unique PlayerIdItem pids; pass all other items downstream."""
    if isinstance(item, PlayerIdItem):
      pid = item['pid']
      if pid not in self._seen:
        self._seen.add(pid)
        self.pidarr.append(pid)

      # Don't let later pipelines process this bookkeeping item.
      # Bug fix: include a reason so the drop is meaningful in the log.
      raise DropItem('PlayerIdItem consumed by PlayIdSavePipeline')

    # Bug fix: the original returned None for any item that was neither a
    # PlayerIdItem nor a PlayerDetailItem, silently discarding it.  Scrapy
    # pipelines must return the item to keep it flowing downstream.
    return item

  def close_spider(self, spider):
    """Bulk-insert the collected pids, timestamped with the close time."""
    if isinstance(spider, PlayerIdSpider) and len(self.pidarr) > 0:
      ts = int(time.time())
      # Bug fix: insert_many(ordered=False) replaces the deprecated
      # pymongo 2.x collection.insert(..., continue_on_error=True) --
      # insert() was removed in pymongo 3/4, and the file already uses
      # the 3.x API (delete_many).  Unordered writes keep going past
      # individual document failures, matching continue_on_error.
      self.collection.insert_many(
          [{'pid': pid, 'ts': ts} for pid in self.pidarr],
          ordered=False)