import datetime
import functools
import os
import time
from xml.sax.saxutils import escape

from common import DbHelper
from faq import Faq
from fm import QingtingfmCrawler
from novel import NovelCrawler
from yt_dlp import YoutubeCrawler


class Podcast(object):
    """Aggregate the newest RSS items from the database into a single
    ``all.rss`` feed file, then prune stale per-source rss files.
    """

    def __init__(self, save_path):
        # Directory that receives ``all.rss`` and contains ``rssfiles/``.
        self.save_path = save_path
        self.db = DbHelper()

    def gen_podcast(self):
        """Write ``<save_path>/all.rss`` from the latest DB items, close the
        DB connection, then delete rss files older than 10 days.

        The DB is now closed in a ``finally`` so a failure while writing the
        feed no longer leaks the connection (the original only closed on
        success).
        """
        try:
            self.db.clean_table()
            rss_items = self.db.get_lastest_rss_items()
            with open(os.path.join(self.save_path, 'all.rss'), 'w',
                      encoding='utf8') as fp:
                fp.write('<?xml version="1.0" encoding="UTF-8" ?>')
                fp.write('<rss version="2.0">')
                fp.write('<channel>')
                fp.write('<title>GJH合集</title>\n')

                # Expected row layout (from get_lastest_rss_items):
                # item[1]=title, item[2]=link, item[3]=audio url,
                # item[4]=pub date, item[5]=description — TODO confirm in DbHelper.
                for item in rss_items:
                    fp.write('<item>')
                    # escape() protects against '&', '<' and '>' in titles
                    # and URLs, which previously produced invalid XML.
                    fp.write('<title>%s</title>' % escape(item[1]))
                    fp.write('<link>%s</link>' % escape(item[2]))
                    if item[3]:
                        fp.write('<enclosure url="%s" type="audio/mpeg" />'
                                 % escape(item[3]))
                    if item[4]:
                        fp.write('<pubDate>%s</pubDate>' % item[4].strip())
                    if item[5]:
                        # Description is wrapped in CDATA, so no escaping needed.
                        fp.write('<description><![CDATA[%s]]></description>'
                                 % item[5])
                    fp.write('</item>\n')
                fp.write('</channel></rss>')
        finally:
            # Always release the DB connection, even if feed writing failed.
            self.db.close()
        self.__clean_old_files()

    def __clean_old_files(self):
        """Delete regular files in ``<save_path>/rssfiles`` whose mtime is
        older than 10 days; subdirectories are left untouched."""
        cutoff = datetime.datetime.now() - datetime.timedelta(days=10)

        rss_path = os.path.join(self.save_path, 'rssfiles')
        for fn in os.listdir(rss_path):
            fp = os.path.join(rss_path, fn)
            if os.path.isdir(fp):
                continue
            mtime = datetime.datetime.fromtimestamp(os.path.getmtime(fp))
            if mtime < cutoff:
                os.remove(fp)

# Handle exceptions raised by scheduled tasks


def catch_exceptions(print_trace=True, cancel_on_failure=False):
    """Decorator factory: run the wrapped job and swallow its exceptions so a
    single failing job does not kill the scheduler loop.

    :param print_trace: when True, print the full traceback of a caught error.
    :param cancel_on_failure: kept for API compatibility with the (currently
        disabled) ``schedule``-based loop; unused here.
    :returns: a decorator producing a wrapper that returns the job's result,
        or ``None`` when the job raised.
    """
    def catch_exceptions_decorator(job_func):
        @functools.wraps(job_func)
        def wrapper(*args, **kwargs):
            try:
                return job_func(*args, **kwargs)
            except Exception:
                # Narrowed from a bare ``except:``, which also trapped
                # SystemExit/KeyboardInterrupt and made the main loop
                # impossible to stop with Ctrl-C.
                if print_trace:
                    import traceback
                    print(traceback.format_exc())
                # if cancel_on_failure:
                #     return schedule.CancelJob
        return wrapper
    return catch_exceptions_decorator


@catch_exceptions(cancel_on_failure=False)
def crawl_novel():
    """Crawl novel chapters, but only during the first ten minutes of every
    hour divisible by 7 (so roughly three runs a day)."""
    now = datetime.datetime.now()
    in_window = now.hour % 7 == 0 and now.minute <= 10
    if not in_window:
        return
    print('>>>>>>>>>> novel:\t'+now.strftime("%Y-%m-%d %H:%M:%S"))
    home_path = '/home/gjh/mynotes/'
    # crawler.crawl_one('trdcz', 'https://m.147xs.org/book/136540/', "//div[@class='book_last']/dl/dd/a", '//div[@id="nr"]//text()')
    # crawler.crawl_one('trdcz', 'https://m.147xs.org/book/136540/', "//div[@class='book_last']/dl/dd/a", '//div[@id="nr"]')
    NovelCrawler(home_path).crawl()


@catch_exceptions(print_trace=True)
def crawl_youtube():
    """Download new YouTube items through the local HTTP proxy."""
    stamp = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    print('>>>>>>>>>> youtube:\t' + stamp)
    # crawler = YoutubeCrawler('/home/gjh/mynotes/', 'http://192.168.31.77:1083')
    YoutubeCrawler('/home/gjh/mynotes/', 'http://127.0.0.1:10809').crawl()


@catch_exceptions(cancel_on_failure=True)
def crawl_qingtingfm():
    """Fetch new episodes from qingting.fm."""
    stamp = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    print('>>>>>>>>>> qingtingfm:\t' + stamp)
    QingtingfmCrawler().crawl()


@catch_exceptions(print_trace=True)
def gen_question():
    """Regenerate the FAQ/question RSS feed."""
    stamp = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    print('>>>>>>>>>> gen_question:\t' + stamp)
    Faq().gen_question_rss()


@catch_exceptions(print_trace=True)
def update_rss():
    """Rebuild the aggregated all.rss podcast file from the database."""
    stamp = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    print('>>>>>>>>>> update_rss:\t' + stamp)
    Podcast('/home/gjh/mynotes/').gen_podcast()


@catch_exceptions(print_trace=True)
def fetch_feeds():
    """Run the FeedMaker crawler over the configured feed list."""
    stamp = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    print('>>>>>>>>>> fetch feeds:\t' + stamp)
    # Imported lazily, matching the original (keeps module import cheap).
    from feedmaker import FeedMaker
    FeedMaker('/home/gjh/mynotes').crawl()


def run_once():
    """One pass of the job runner; only feed fetching is currently enabled."""
    fetch_feeds()
    # Disabled jobs, kept for quick re-enabling:
    # gen_question()
    # crawl_novel()
    # crawl_youtube()
    # crawl_qingtingfm()
    # update_rss()


# def main_loop():
#     # schedule.every(90).minutes.do(crawl_novel)
#     schedule.every(10).minutes.do(crawl_youtube)
#     schedule.every(12).hours.do(gen_question)
#     # schedule.every(10).minutes.do(crawl_qingtingfm)
#     schedule.every(10).minutes.do(update_rss)
#     # schedule.every().day.at("06:00").do(crawl_qingtingfm)

#     while True:
#         schedule.run_pending()
#         # schedule.run_pending()
#         update_rss()
#         print('-------------------------------------------------------')
#         time.sleep(10*60)

if __name__ == '__main__':
    # Run the active jobs once every ten minutes, forever.
    interval_seconds = 10 * 60
    while True:
        print(datetime.datetime.now())
        print('-------------------------------------------------------')
        run_once()
        time.sleep(interval_seconds)

    # crawl_qingtingfm()
    # update_rss()
    # crawl_novel()
    # main_loop()
