#! /usr/bin/env python
# -*- coding:utf-8 -*-
# vim:fileencoding=utf-8


from contextlib import closing
from urlparse import urljoin
from urllib2 import Request, urlopen
import time

# Default HTTP headers for all requests: a desktop-Chrome User-Agent so that
# servers which reject generic Python clients still respond.
request_header = {'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/28.0.1500.52 Safari/537.36'}

def urlVerify(url, **kargs):
    """验证url是否是有效的url"""
    retval = False
    baseUrl = kargs.get('base_url', None)
    sleep = kargs.get('sleep', 5)
    timeout = kargs.get('timeout', 30)
    retry = kargs.get('retry', 0)
    headers = kargs.get('headers', request_header)

    if url:
        if baseUrl:
            url = urljoin(baseUrl, url)

        while True:
            try:
                request = Request(url, headers = request_header)

                with closing(urlopen(request)) as f:
                    retval = True

            except Exception as e:
                print url, str(e)

                if retry < 1:
                    break
                else:
                    retry -= 1
                    time.sleep(sleep)
            else:
                break;

    return retval


def realUrl(url, **kargs):
    """获取真实的url"""
    _url = '' 
    baseUrl = kargs.get('base', None)
    sleep = kargs.get('sleep', 5)
    timeout = kargs.get('timeout', 30)
    retry = kargs.get('retry', 0)
    headers = kargs.get('headers', request_header)

    if baseUrl:
        url = urljoin(baseUrl, url)

    if url:

        while True:
            try:
                request = Request(url, headers=headers)

                with closing(urlopen(request, timeout=timeout)) as f:
                    _url = f.geturl()

            except Exception as e:
                print 'Get Real url fail, message: %s' % str(e)

                if retry < 1:
                    break;
                else:
                    retry = retry - 1
                    time.sleep(sleep)
            else:
                break;

    return _url


def urlGetContents(url, **kargs):
    body = None
    baseUrl = kargs.get('base_url', None)
    sleep = kargs.get('sleep', 5)
    timeout = kargs.get('timeout', 30)
    retry = kargs.get('retry', 0)
    headers = kargs.get('headers', request_header)

    if baseUrl:
        url = urljoin(baseUrl, url)

    if url:
        while True:
            try:
                request = Request(url, headers=headers)

                with closing(urlopen(request, timeout=timeout)) as f:
                    body = f.read()

            except Exception as e:
                print 'Open url %s fail, message: %s' % (url, str(e))

                if retry < 1:
                    break
                else:
                    retry -= 1
                    time.sleep(sleep)
            else: 
                break

    return body
