__author__ = 'YinDu'

from utils.CrawlerTool import *
from celery_task.app_scripts.test1 import *


def day_sleep():
    """Block the polling loop until the next useful polling moment.

    Sleeps across weekends/overnight gaps and only a few seconds while
    the data window is open.  The timestamps ('08:55:00', '15:05:00')
    presumably bracket a stock-exchange trading session — TODO confirm
    against the quote source.  Relies on project helpers time_now(),
    time_wee_hours(), datetime_now() and str_change_timestamp() from
    utils.CrawlerTool.
    """
    weekday = datetime.datetime.now().weekday()
    days_to_next_week = 7 - weekday
    # Precomputed wake-up offset used by both weekend branches below.
    weekend_offset = days_to_next_week * 32 * 3600 + 3000
    if weekday == 5 or weekday == 6:
        # Saturday/Sunday: sleep through to the next session.
        time.sleep((weekend_offset + time_wee_hours()) - time_now())
    elif weekday == 4 and time_now() >= str_change_timestamp(datetime_now() + ' ' + '15:05:00'):
        # Friday after close: same long weekend sleep.
        time.sleep((weekend_offset + time_wee_hours()) - time_now())
    elif weekday == 0 and time_now() <= str_change_timestamp(datetime_now() + ' ' + '08:55:00'):
        # Monday before open: sleep until 08:50 today.
        time.sleep(str_change_timestamp(datetime_now() + ' ' + '08:50:00') - time_now())
    elif time_now() >= str_change_timestamp(datetime_now() + ' ' + '15:05:00'):
        # Mid-week after close: sleep into the next day.
        time.sleep(time_wee_hours() + 32 * 3600 + 3000 - time_now())
    elif time_now() <= str_change_timestamp(datetime_now() + ' ' + '08:55:00'):
        # Mid-week before open: sleep until shortly after 08:50.
        time.sleep(time_wee_hours() + 8 * 3600 + 3000 - time_now())
    else:
        # Session is open: short pause between polls.
        time.sleep(5)


class BugStock:
    """Endless iterator that polls a quote endpoint for one stock code.

    Each __next__ call fetches url+name, extracts the quoted CSV payload
    and hands selected fields to the celery task test11.  It never raises
    StopIteration, so iterating an instance loops forever by design.
    """

    def __init__(self, url='', name='sh000006'):
        self.url = url    # base URL of the quote service to crawl
        self.name = name  # stock code appended to the URL

    def __iter__(self):
        return self

    def __next__(self):
        raw = rweb.request_get(self.url + self.name)
        if raw == "":
            return
        payload = raw.split('"')[1]
        if payload == '':
            return
        fields = payload.split(',')
        # Dispatch the parsed quote asynchronously; field positions are
        # fixed by the upstream response format — TODO confirm indices.
        test11.delay(time_now=fields[-3], price=fields[3],
                     stock_name=self.name, stock_time=fields[-4])


if __name__ == "__main__":
    bstock = BugStock()
    for the_stock in bstock:
        day_sleep()
import time

# NOTE(review): this runs at import time, *outside* the __main__ guard
# above — it looks like a pasted scratch test; consider removing it.
#
# Bug fix: time.strftime() requires a struct_time, not a raw epoch int
# (passing 1637552103 directly raises TypeError), and the original
# format string "%Y- %m - %d % H: % M: % S" had spaces inside the
# %-directives, which are not valid strftime codes.  Convert the epoch
# seconds with time.localtime() and use clean directives.
k = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(1637552103))
print(k)
