from urllib import request
from urllib import error


def download(url, path, proxy_switch=False):
    """Download *url* and write the response body to *path*.

    :param url: source URL to fetch.
    :param path: local file path, written in binary mode.
    :param proxy_switch: when True, install an HTTP proxy handler for
        ``localhost:1080`` before fetching.  NOTE: ``install_opener``
        mutates process-wide state, so the proxy stays installed for
        all later ``urlopen`` calls too.

    On ``error.URLError`` the fetch is retried exactly once; a second
    failure propagates to the caller (same behavior as before, but
    without duplicating the download body in the handler).
    """
    print(url + ' > ' + path)
    if proxy_switch:
        # install_opener() changes the default opener for the whole
        # process, not just this call.
        proxy_support = request.ProxyHandler({'http': 'localhost:1080'})
        opener = request.build_opener(proxy_support)
        request.install_opener(opener)
    try:
        _fetch(url, path)
    except error.URLError:
        # Possibly-transient network error: retry once, then give up
        # and let the second exception propagate.
        _fetch(url, path)


def _fetch(url, path):
    """Single download attempt: stream *url* into the file at *path*."""
    with request.urlopen(url) as web:
        with open(path, 'wb') as outfile:
            outfile.write(web.read())


def download_wiki_project_count(years=range(2016, 2017),
                                months=range(1, 2),
                                days=range(26, 31),
                                hours=range(24)):
    """Download hourly Wikimedia *projectcounts* dump files.

    :param years: iterable of years to fetch (default: 2016 only).
    :param months: iterable of month numbers 1-12 (default: January only).
    :param days: iterable of day numbers (default: the 26th through 30th).
    :param hours: iterable of hours 0-23 (default: all 24 hours).

    The defaults reproduce the original hard-coded window
    (2016-01-26 through 2016-01-30, every hour).  Files are written
    under ``data/wiki/`` with the same basename as on the server.
    """
    for year in years:
        for month in months:
            for day in days:
                for hour in hours:
                    # Build the shared timestamp once instead of
                    # formatting the same fields twice for URL and path.
                    stamp = '{y}{m:02d}{d:02d}-{h:02d}0000'.format(
                        y=year, m=month, d=day, h=hour)
                    url = ('https://dumps.wikimedia.org/other/pagecounts-raw/'
                           '{y}/{y}-{m:02d}/projectcounts-{s}'.format(
                               y=year, m=month, s=stamp))
                    path = 'data/wiki/projectcounts-' + stamp
                    download(url, path)


def download_wiki_page_count(years=range(2016, 2017),
                             months=range(1, 2),
                             days=range(2, 8),
                             hours=range(24)):
    """Download hourly Wikimedia *pagecounts* gzip dump files.

    :param years: iterable of years to fetch (default: 2016 only).
    :param months: iterable of month numbers 1-12 (default: January only).
    :param days: iterable of day numbers (default: the 2nd through 7th).
    :param hours: iterable of hours 0-23 (default: all 24 hours).

    The defaults reproduce the original hard-coded window
    (2016-01-02 through 2016-01-07, every hour).  Files are written
    under ``data/wiki/raw/`` and fetched through the local proxy
    (``proxy_switch=True``), as before.
    """
    for year in years:
        for month in months:
            for day in days:
                for hour in hours:
                    # Build the shared timestamp once instead of
                    # formatting the same fields twice for URL and path.
                    stamp = '{y}{m:02d}{d:02d}-{h:02d}0000'.format(
                        y=year, m=month, d=day, h=hour)
                    url = ('https://dumps.wikimedia.org/other/pagecounts-raw/'
                           '{y}/{y}-{m:02d}/pagecounts-{s}.gz'.format(
                               y=year, m=month, s=stamp))
                    path = 'data/wiki/raw/pagecounts-' + stamp + '.gz'
                    download(url, path, proxy_switch=True)


# Script entry point: fetch the hourly pagecounts dumps for the
# default (hard-coded) January 2016 window.
if __name__ == '__main__':
    download_wiki_page_count()
