# -*- coding: utf-8 -*-

import time
import json
import sched
from subprocess import run
from dolphin.biz.spider_urls_rest import SpiderUrlsRest
from dolphin.biz.google_book_biz import GoogleBookBiz
from dolphin.biz.word_rest import WordRest
from dolphin.common.commonlogger import commonlogger

#from Scrapy.cmdline import execute
logger = commonlogger().getlogger()

schedule = sched.scheduler ( time.time, time.sleep )

#ptvsd.enable_attach(address = ('10.142.0.2', 5678))
#ptvsd.wait_for_attach()

timeout = 60

def period_execute():
  """Run one crawl cycle: fetch a keyword, build its URLs, spawn scrapy.

  Fetches the next query keyword from the word REST service, resolves the
  Google Books URLs to scrape for it, and runs the `googlebook` spider in a
  subprocess with those URLs passed as a JSON-encoded argument.  All errors
  are logged and swallowed so the surrounding scheduler keeps running.
  """
  try:
    # NOTE(review): passing the class itself as `self` mirrors the original
    # call style of these project helpers — confirm they are stateless.
    word = WordRest.get_query_key_word_by_restservice(WordRest)
    if not word:
      # No keyword available this cycle; nothing to crawl.
      return
    single_word = word[0]["word"]
    urls = GoogleBookBiz.get_all_scrapy_urls(GoogleBookBiz, single_word)
    if urls:
      try:
        # check=True makes scrapy failures raise CalledProcessError,
        # which is _probably_ a good idea in most scenarios.
        run(["scrapy", "crawl", "googlebook", "-a", "arg=" + json.dumps(urls)], check=True)
      except Exception:
        # Exception (not BaseException): let KeyboardInterrupt/SystemExit
        # propagate so the process can actually be stopped.
        logger.exception("scrapy crawl failed")
    # Recommend slowing down to avoid a ban from the server.
    time.sleep(10)
  except Exception:
    logger.exception("period_execute failed")

def period_action(inc):
  """Re-arm the scheduler for the next cycle, then execute one crawl pass.

  Scheduling before executing keeps the period fixed even if the crawl
  itself raises or runs long.
  """
  schedule.enter(inc, 0, period_action, argument=(inc,))
  period_execute()

def schedule_define():
  """Seed the scheduler: kick off period_action immediately with a 3s period."""
  schedule.enter(0, 0, period_action, argument=(3,))

if __name__ == '__main__':
    try:
      schedule_define()
      schedule.run()
    except KeyboardInterrupt:
      # Allow clean shutdown via Ctrl-C without a traceback.
      logger.info("interrupted, shutting down")
    except Exception as exc:
      # Was `except ImportError`, but nothing in the try body raises
      # ImportError at runtime — scheduler errors escaped unlogged.
      logger.exception(exc)