from urllib2 import urlopen, Request, HTTPError, URLError
import re
from BeautifulSoup import BeautifulSoup, SoupStrainer

BASE_URL = 'http://www.hindishows.com'


class NetworkError(Exception):
    """Raised when fetching a remote page fails (wraps HTTPError/URLError)."""


def get_channels():
    """Scrape the channel index page.

    Returns a list of dicts with 'label', 'thumbnail' (always empty here)
    and 'path' (relative href of the channel page).
    """
    strainer = SoupStrainer('div', {'id': ['hindi-tv-channels']})
    tree = __get_tree(BASE_URL + '/hindi-tv-channels.php', strainer)
    # Each <li> holds a single <a> whose text is the channel name.
    return [{'label': item.a.getText(),
             'thumbnail': '',
             'path': item.a['href']}
            for item in tree.findAll('li')]

def get_tvshows(channelurl):
    """Scrape the show listing for one channel.

    channelurl -- path fragment of the channel page, relative to BASE_URL.
    Returns a list of dicts with 'label', 'thumbnail' and 'path'.
    """
    strainer = SoupStrainer(
        'table', {'id': ['cat-shows-live-box', 'cat-shows-dead-box']})
    tree = __get_tree(BASE_URL + '/' + channelurl, strainer)

    tvshows = []
    for anchor in tree.findAll('a'):
        parent_tag = anchor.parent.name
        if parent_tag == 'div':
            # Image-style entry: the anchor wraps an <img> thumbnail.
            entry = {
                'label': anchor.getText(),
                'thumbnail': BASE_URL + anchor.img['src'],
                'path': anchor['href'],
            }
        elif parent_tag != 'u' and anchor.parent.parent.parent.name != 'div':
            # Plain text entry with no thumbnail; the <u>/<div> ancestor
            # checks skip duplicate/decorative links in the table.
            entry = {
                'label': anchor.getText(),
                'thumbnail': '',
                'path': anchor['href'],
            }
        else:
            continue
        tvshows.append(entry)

    return tvshows

def get_showepisode(episode):
    """Scrape the episode links on an episode index page.

    episode -- path fragment relative to BASE_URL.
    Returns a list of dicts with 'label' and a whitespace-stripped 'path'.
    """
    strainer = SoupStrainer('div', {'id': ['default-video-list']})
    tree = __get_tree(BASE_URL + '/' + episode, strainer)
    return [{'label': item.a.getText(),
             'path': item.a['href'].strip()}
            for item in tree.findAll('li')]

def get_showlink(showurl):
    """Resolve a show page to a single playable youtube plugin item.

    The page declares a youtube thumbnail via <link rel="image_src">
    (e.g. http://i4.ytimg.com/vi/VIDEOID/default.jpg); the video id is
    pulled out of that URL and wrapped in a plugin.video.youtube play URL.

    showurl -- path fragment relative to BASE_URL.
    Returns a one-element list of dicts with 'label', 'thumbnail',
    'path' and 'is_playable'.
    """
    url = BASE_URL + '/' + showurl
    contentDiv = SoupStrainer('link', {'rel': 'image_src'})
    tree = __get_tree(url, contentDiv)
    video_source_img = str(tree.link['href'])
    # Extract the id from the /vi/<id>/ path segment so any ytimg host
    # (i1-i4.ytimg.com, img.youtube.com, https, ...) works — the old code
    # only stripped the literal 'http://i4.ytimg.com/vi/' prefix and
    # silently produced a bogus id for every other host.
    match = re.search(r'/vi/([^/]+)/', video_source_img)
    if match:
        videoid = match.group(1)
    else:
        # Legacy fallback, kept for byte-compatibility with old behavior.
        videoid = video_source_img.replace(
            'http://i4.ytimg.com/vi/', '').replace('/default.jpg', '')
    play_url = "plugin://plugin.video.youtube/?action=play_video&videoid=" + videoid
    # Use the module's log() helper instead of a bare Py2 print statement.
    log('get_showlink videoid=%s play_url=%s' % (videoid, play_url))
    showlinks = []
    showlinks.append({'label': 'Click to Play Video' + videoid,
                      'thumbnail': video_source_img,
                      'path': play_url,
                      'is_playable': True
                      })
    return showlinks
    
def get_rtmp_params(path):
    """Scrape rtmp streaming parameters from a flash video page.

    path -- page path including leading slash, appended to BASE_URL.
    Returns a dict with 'rtmp_url', 'playpath' (mp4: prefixed), 'app',
    'swf_url' and 'video_page_url'.  Raises AttributeError if the page
    markup does not contain the expected player config.
    """
    video_page_url = BASE_URL + path
    html = __get_url(video_page_url)

    # The player config is embedded as "key: 'value'" pairs in the page.
    swf_url = re.compile("src: '(.*?)'").search(html).group(1)
    rtmp_url = re.compile("streamer: '(.*?)'").search(html).group(1)
    playpath = re.compile("file: '(.*?)'").search(html).group(1)
    # The rtmp application name is everything after the host part.
    app = re.compile("rtmp://.*?/(.*)").search(rtmp_url).group(1)

    return {
        'rtmp_url': rtmp_url,
        'playpath': 'mp4:%s' % playpath,
        'app': app,
        'swf_url': swf_url,
        'video_page_url': video_page_url,
    }

def __resize_icon(url):
    """Return *url* trimmed of whitespace and pointing at the 590px icon."""
    cleaned = url.strip()
    return cleaned.replace('180_y', '590_y')


def __get_tree(url, contentDiv):
    """Fetch *url* with browser-like headers and parse the selected subtree.

    contentDiv -- a SoupStrainer limiting parsing to the markup of interest.
    Returns a BeautifulSoup tree with HTML entities converted.
    Raises NetworkError (via __get_url) on fetch failure.
    """
    request = Request(url)
    # Impersonate a desktop browser; the site may serve different markup
    # (or block) generic clients.
    browser_headers = {
        'Accept': ('text/html,application/xhtml+xml,'
                   'application/xml;q=0.9,*/*;q=0.8'),
        'User-Agent': ('Mozilla/5.0 (X11; Linux i686) '
                       'AppleWebKit/535.21 (KHTML, like Gecko) '
                       'Chrome/19.0.1041.0 Safari/535.21'),
    }
    for header_name, header_value in browser_headers.items():
        request.add_header(header_name, header_value)
    html = __get_url(request)
    return BeautifulSoup(html,
                         convertEntities=BeautifulSoup.HTML_ENTITIES,
                         parseOnlyThese=contentDiv)


def __get_url(req):
    """Open *req* (a url string or urllib2.Request) and return the body.

    Raises NetworkError wrapping any HTTPError or URLError.
    """
    try:
        response = urlopen(req)
        try:
            html = response.read()
        finally:
            # Always close the response; the original leaked the socket
            # (only the GC would eventually reclaim it).
            response.close()
    except HTTPError, error:
        log('__urlopen HTTPError: %s' % error)
        raise NetworkError('HTTPError: %s' % error)
    except URLError, error:
        log('__urlopen URLError: %s' % error)
        raise NetworkError('URLError: %s' % error)
    return html


def log(msg):
    """Print *msg* to stdout, tagged with the scraper's base URL."""
    line = '%s scraper: %s' % (BASE_URL, msg)
    print(line)
