from scrapy.spider import BaseSpider
from PeteParker.items import GeneralItem
from scrapy import log

class MainSpider(BaseSpider):
  """Generic single-URL spider.

  Constructed with an optional ``name`` and a ``url``; it crawls exactly
  that one URL and yields a single :class:`GeneralItem` capturing the raw
  response (headers, status, body, meta, flags) for downstream pipelines.
  """
  name = 'MainSpider'
  #allowed_domains = []
  start_urls = []

  def __init__(self, name=None, url=None, **kwargs):
    """Initialize the spider.

    :param name: optional override for the spider name.
    :param url: the single URL to crawl; if falsy, the spider is still
        created but has no ``start_urls``, so nothing will be fetched.
    :param kwargs: extra attributes copied verbatim onto the instance.
    :raises ValueError: if no name is supplied and none is inherited.
    """
    # Name validation must happen whether or not a url was given —
    # previously it was nested under ``if url:`` and a ``name`` passed
    # without a ``url`` was silently dropped.
    if name is not None:
      self.name = name
    elif not getattr(self, 'name', None):
      log.msg("%s must have a name" % type(self).__name__, level=log.WARNING)
      raise ValueError("%s must have a name" % type(self).__name__)
    self.__dict__.update(kwargs)
    if url:
      self.start_urls = [url]
    else:
      # No url: log once (the duplicate print() was a debug leftover).
      # The spider is still constructed; with empty start_urls Scrapy
      # simply schedules no requests.
      log.msg("No links given, quit now", level=log.ERROR)

  def parse(self, response):
    """Wrap the raw response into a GeneralItem for the pipeline.

    :param response: the Scrapy Response for the single start URL.
    :returns: a populated GeneralItem.
    """
    item = GeneralItem()
    item['spider'] = self.name
    # Content type is not sniffed here; pipelines may refine it later.
    item['contentType'] = 'unknown'
    item['headers'] = response.headers
    item['url'] = response.url
    item['status'] = response.status
    item['body'] = response.body
    item['meta'] = response.meta
    item['flags'] = response.flags
    log.msg("jump to pipe", level=log.INFO)
    return item
