#!/usr/bin/env python3
# -*- coding: utf8 -*-

from pathlib import Path
import logging
from requests import Session
from pyquery import PyQuery as pq
import re
from datetime import datetime
import sqlite3

import tdb


# Root folder for downloaded MP3s; download() adds a per-month subfolder.
DL_FOLDER = Path('dl')
# Shared HTTP session so TCP connections are reused across requests.
sn = Session()
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger('voa51')
# Example of a direct MP3 URL in the layout download() expects (.../<YYYYMM>/<name>.mp3).
test_url = 'https://downdb.51voa.com/201909/some-us-cities-move-to-limit-natural-gas-use.mp3'
# Index page scraped by main().
VOA_Special_English = 'https://www.51voa.com/VOA_Special_English/'
re_date = re.compile(r'(\d+)[/-](\d+)[/-](\d+)')  # date maybe (2019-10-17) or (2019-10-17 11:24:01)  (2020/3/24)
# NOTE(review): main() queries through tdb.db, not this connection — confirm
# this module-level handle is actually used anywhere.
db = sqlite3.connect("voa51.db")


def download(url: str, cb=None):
    """Download *url* into DL_FOLDER/<month>/<name>, skipping existing files.

    :param url: direct media URL, e.g.
        https://downdb.51voa.com/201909/some-us-cities-move-to-limit-natural-gas-use.mp3
    :param cb: optional callable; invoked with *url* once the file is
        present on disk (freshly downloaded or already there).
    :return: None
    """
    # URL layout is .../<YYYYMM>/<filename>; the month part becomes a subfolder.
    _, pub, name = url.rsplit('/', maxsplit=2)
    folder = DL_FOLDER / pub
    if not folder.exists():
        logger.info(f'{folder} not exist, make it')
        folder.mkdir(parents=True, exist_ok=True)
    fn = folder / name
    if fn.exists():
        # BUG FIX: original logged the literal text 'fn exists' — the f-string
        # had no placeholder, so the file name never appeared in the log.
        logger.info(f'{fn} exists, skip download')
    else:
        logger.info(f'Download: {url}')
        # BUG FIX: stream=True so iter_content actually streams the body in
        # 4 KiB chunks instead of buffering the whole file in memory first.
        resp = sn.get(url, stream=True)
        # Don't save an HTML error page under an .mp3 name.
        resp.raise_for_status()
        with open(fn, 'wb') as fp:
            for chunk in resp.iter_content(chunk_size=4096):
                fp.write(chunk)
    if callable(cb):
        cb(url)


def index_page(url: str):
    """Scrape a 51voa index page and return a list of article dicts.

    Each dict carries 'url' and 'title'; a 'date' key (datetime) is added
    only when the link text contains a parseable date such as
    (2019-10-17) or (2020/3/24).

    :param url: index page URL, e.g. VOA_Special_English.
    :return: list of dicts with keys 'url', 'title' and optionally 'date'.
    """
    resp = sn.get(url)
    html = pq(resp.text)
    html.make_links_absolute(url)
    pages = []

    for lis in html('div.List>ul').items('li'):
        item = {}
        # The last <a> inside the <li> is the article link.
        link = lis('a').eq(-1)
        item['url'] = link.attr('href')
        text = link.text()
        item['title'] = text
        # text is composed of body (date)
        s1 = re_date.findall(text)
        if len(s1) > 0:
            # Defensive: re_date has exactly 3 groups, so each match tuple
            # should always have length 3.
            if len(s1[0]) != 3:
                # BUG FIX: the 'f' prefix was inside the quotes, so the
                # literal string 'f{text} date not valid' was logged.
                logger.warning(f'{text} date not valid')
                continue
            s1 = list(map(int, s1[0]))
            d = datetime(year=s1[0], month=s1[1], day=s1[2])
            item['date'] = d
        pages.append(item)
    logger.debug(pages)
    return pages


def content_page(url: str):
    """Fetch one article page.

    :param url: absolute article URL from index_page().
    :return: tuple (mp3_url, content_html) — either element may be None
        when the corresponding node is absent.
    """
    doc = pq(sn.get(url).text)
    audio_link = doc('a#mp3').attr('href')
    body_html = doc('div.Content').html()
    return audio_link, body_html


def main():
    """Sync the index: store new articles in tdb, then download pending MP3s."""
    tdb.table_init()
    ds = []
    for page in index_page(VOA_Special_English):
        if 'url' not in page:
            # BUG FIX: message typo "int" -> "in".
            logger.warning(f'url key not found in {page}')
            continue
        url = page['url']
        # Parameterized query; url is stored as utf-8 bytes in the table.
        found = tdb.db.execute("select url from voa51 where url = ?", (url.encode('utf8'), )).fetchone()
        logger.info(f"found {found} for {url}")
        if found:
            logger.info(f'Url: {url} already processed')
            continue
        mp3, content = content_page(url)
        # title, url, content, date, mp3
        # BUG FIX: index_page() only sets 'date' when the title contains a
        # parseable date; page['date'] raised KeyError for other rows.
        ds.append([page['title'], url, content, page.get('date'), mp3])
    tdb.update_data(ds)
    # Fetch every stored article whose MP3 hasn't been downloaded yet.
    mp3s = tdb.find_not_download()
    for mp3 in mp3s:
        download(mp3, tdb.update_download)
    # Anything still listed here failed to download.
    logger.info("\n".join(tdb.find_not_download()))


# Script entry point: run the full sync when executed directly.
if __name__ == '__main__':
    main()
