'''
    Ice Channel
    MBox
    By SJoneZ
'''

from entertainment.plugnplay.interfaces import MovieSource
from entertainment.plugnplay.interfaces import TVShowSource
from entertainment.plugnplay.interfaces import CustomSettings
from entertainment.plugnplay import Plugin
from entertainment import common
from entertainment.net import Net
import xbmc, re, os

class SJMBox(MovieSource, TVShowSource, CustomSettings):
    # Ice Channel source plugin for the MBox (mobapps.cc) catalogue.
    # Provides movies and TV episodes plus a custom settings dialog.
    implements = [MovieSource, TVShowSource, CustomSettings]

    import xbmcaddon

    addon_id = 'script.icechannel.extn.sjmbox'
    addon = xbmcaddon.Addon(addon_id)

    # names shown in the Ice Channel UI
    name = "SJ-MBox"
    display_name = "MBox"
    # default API host; users may override it through the settings dialog (see __init__/get_url)
    apibase = 'http://mobapps.cc'
    # path, relative to the base URL, of the zipped metadata dump fetched by getContentByType
    dataurl = 'data/data_en.zip'
    # user-agent sent when resolving links (mimics the Android app's HTTP client)
    useragent = 'android-async-http/1.4.1 (http://loopj.com/android-async-http)'
    datapath = xbmc.translatePath(addon.getAddonInfo('profile'))  # this addon's profile directory
    profile = os.path.join(datapath,'MBox')  # where the metadata zip gets extracted

    # NOTE(review): a string, not a bool -- presumably parsed by the framework; confirm
    source_enabled_by_default = 'false'

    # runs once at class-definition (import) time: make sure the profile dir exists
    if not os.path.exists(datapath):
        os.makedirs(datapath)
    
    def __init__(self):
        """Register this source's settings screen with the Ice Channel framework."""
        settings_xml = (
            '<settings>\n'
            '<category label="General">\n'
            '<setting id="custom_urls" type="labelenum" label="URL" default="http://mobapps.cc" values="Custom|http://mobapps.cc" />\n'
            '<setting id="custom_text_url" type="text" label="     Custom" default="" enable="eq(-1,0)" />\n'
            '</category>\n'
            '</settings>\n'
        )
        self.CreateSettings(self.name, self.display_name, settings_xml)

    def get_url(self):
        """Return the user-configured base URL, normalised to 'http://host/' form."""
        url = self.Settings().get_setting('custom_urls')
        if url == 'Custom':
            # 'Custom' selects the free-text URL field instead
            url = self.Settings().get_setting('custom_text_url')
        if not url.startswith('http'):
            url = 'http://' + url
        return url if url.endswith('/') else url + '/'
        
    def OPENURL(self, url, mobile = False, q = False, verbose = True, timeout = 10, cookie = None, data = None, cookiejar = False, log = True, headers = [], type = '',ua = False):
        import urllib2 
        UserAgent = 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.0.3) Gecko/2008092417 Firefox/3.0.3'
        if ua: UserAgent = ua
        try:
            if log:
                print "MU-Openurl = " + url
            if cookie and not cookiejar:
                import cookielib
                cookie_file = os.path.join(os.path.join(datapath,'Cookies'), cookie+'.cookies')
                cj = cookielib.LWPCookieJar()
                if os.path.exists(cookie_file):
                    try: cj.load(cookie_file,True)
                    except: cj.save(cookie_file,True)
                else: cj.save(cookie_file,True)
                opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
            elif cookiejar:
                import cookielib
                cj = cookielib.LWPCookieJar()
                opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
            else:
                opener = urllib2.build_opener()
            if mobile:
                opener.addheaders = [('User-Agent', 'Mozilla/5.0 (iPhone; U; CPU iPhone OS 4_0 like Mac OS X; en-us) AppleWebKit/532.9 (KHTML, like Gecko) Version/4.0.5 Mobile/8A293 Safari/6531.22.7')]
            else:
                opener.addheaders = [('User-Agent', UserAgent)]
            for header in headers:
                opener.addheaders.append(header)
            if data:
                if type == 'json': 
                    import json
                    data = json.dumps(data)
                    opener.addheaders.append(('Content-Type', 'application/json'))
                else: data = urllib.urlencode(data)
                response = opener.open(url, data, timeout)
            else:
                response = opener.open(url, timeout=timeout)
            if cookie and not cookiejar:
                cj.save(cookie_file,True)
            link=response.read()
            response.close()
            opener.close()
            #link = net(UserAgent).http_GET(url).content
            link=link.replace('&#39;',"'").replace('&quot;','"').replace('&amp;',"&").replace("&#39;","'").replace('&lt;i&gt;','').replace("#8211;","-").replace('&lt;/i&gt;','').replace("&#8217;","'").replace('&amp;quot;','"').replace('&#215;','x').replace('&#038;','&').replace('&#8216;','').replace('&#8211;','').replace('&#8220;','').replace('&#8221;','').replace('&#8212;','')
            link=link.replace('%3A',':').replace('%2F','/')
            if q: q.put(link)
            return link
        except Exception as e:
            print '***********Website Error: '+str(e)+'**************'
            import traceback
            traceback.print_exc()
            link ='website down'
            if q: q.put(link)
            return link
        
    def downloadFile(self,url,dest,silent = False,cookie = None):
        try:
            import urllib2
            file_name = url.split('/')[-1]
            print "Downloading: %s" % (file_name)
            if cookie:
                import cookielib
                cookie_file = os.path.join(os.path.join(self.datapath,'Cookies'), cookie+'.cookies')
                cj = cookielib.LWPCookieJar()
                if os.path.exists(cookie_file):
                    try: cj.load(cookie_file,True)
                    except: cj.save(cookie_file,True)
                else: cj.save(cookie_file,True)
                opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
            else:
                opener = urllib2.build_opener()
            opener.addheaders = [('User-Agent', 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.0.3) Gecko/2008092417 Firefox/3.0.3')]
            u = opener.open(url)
            f = open(dest, 'wb')
            meta = u.info()
            if meta.getheaders("Content-Length"):
                file_size = int(meta.getheaders("Content-Length")[0])
            else: file_size = 'Unknown'
            file_size_dl = 0
            block_sz = 8192
            while True:
                buffer = u.read(block_sz)
                if not buffer: break
                file_size_dl += len(buffer)
                f.write(buffer)
                #status = r"%10d  [%3.2f%%]" % (file_size_dl, file_size_dl * 100. / file_size)
                #status = status + chr(8)*(len(status)+1)
                #print status,
            print "Downloaded: %s %s Bytes" % (file_name, file_size)
            f.close()
            return True
        except Exception, e:
            print 'Error downloading file ' + url.split('/')[-1]
            self.ErrorReport(e, 'Source website is down')
            return False

    def GetFileHostsForContent(self, title, name, year, season, episode, type, list, lock, message_queue):                 
        
        name = self.CleanTextForSearch(name) 
        
        search_term = name
        mbox_url = ''
        vid_url = ''
        quality_dict = {'1080':'HD', '720':'HD', '540':'SD', '480':'SD', '360':'LOW', '240':'LOW'}
        
        print 'looking for Mbox Content: ' + search_term + ' (' + year + ')'

        if type == 'tv_episodes':
            tmp = self.getContentByType(search_term, year, 'tv')
            if len(tmp) > 0:
                mbox_id = tmp[0][1]
                mbox_url = self.get_url()+'api/serials/e?h='+mbox_id+'&u='+season+'&y='+episode
        elif type == 'movies':
            tmp = self.getContentByType(search_term, year, 'Movies')
            if len(tmp) > 0:
                mbox_url = tmp[0][1]

        if len(mbox_url) > 1:
            vid_url = self.resolveMBLink(mbox_url)
            if len(vid_url) > 1:
                try:
                    print 'Video URL is ' + vid_url
                    link = Net(cached=False, user_agent=self.useragent).http_GET(vid_url).content
                    urllist=[]
                    quaList=[]
                    match=re.findall('(?sim)<source src="([^"]+)"',link)

                    if len(match) == 0:
                        print 'No video URLs exist for this media'

                    for url in match:
                        qua=re.findall('(?sim).(\d+).mp4',url)
                        print 'found quality url [' + str(qua[0]) + ']: ' + url
                        self.AddFileHost(list, quality_dict.get(str(qua[0]), 'NA'), url, host='MBOX')
                        
                except Exception, e:
                    print 'Error resolving quality URLs for ' + vid_url
                    self.ErrorReport(e, 'Source website is down')
            else:
                print 'Could not resolve video URL for ' + mbox_url    
        else:
            print 'No MBox links found'

    def Resolve(self, url):
        # MBox URLs are already direct/playable links; nothing to resolve.
        return url

    def resolveMBLink(self, url):

        #net = Net(cached=False, user_agent=self.useragent)
        
        #print 'resolving ' + url

        r = re.findall('h=(\d+?)&u=(\d+?)&y=(\d+)',url,re.I)
        if r: r = int(r[0][0]) + int(r[0][1]) + int(r[0][2])
        else: r = 537 + int(re.findall('id=(\d+)',url,re.I)[0])
        try:
            #link = net.http_GET(url).content
            link=self.OPENURL(url, verbose=False, ua=self.useragent, timeout=60)
            q = re.findall('"lang":"en","apple":([-\d]+?),"google":([-\d]+?),"microsoft":"([^"]+?)"',link,re.I)
            if len(q) == 0:
                return ''
            vklink = "https://vk.com/video_ext.php?oid="+str(r + int(q[0][0]))+"&id="+str(r + int(q[0][1]))+"&hash="+q[0][2]
            vklink=vklink.replace("\/",'/')
            return vklink
        except Exception, e:
            print 'Error connecting to URL ' + url
            self.ErrorReport(e, 'Source website is down')
            return ''
    def file_age_in_seconds(self, pathname):
        """Return how many seconds ago *pathname* was last modified."""
        import time, os, stat
        last_modified = os.stat(pathname)[stat.ST_MTIME]
        return time.time() - last_modified

    def getContentByType(self, encode, year, type):
        import time, json
        try:
            returnList=[]
            custom_url = self.get_url()
            encode = encode.replace('%20',' ')
            encode = encode.replace(':', '(:|)')
            try:
                lib=os.path.join(self.datapath, 'MBox.zip')
                path=os.path.join(self.profile,type.lower()+'_lite.json')
                requireDownload = True
                if os.path.exists(path):
                    if self.file_age_in_seconds(path) < 3600:
                        requireDownload = False
                if requireDownload:
                    if self.downloadFile(custom_url+self.dataurl,lib,False):
                        xbmc.executebuiltin("XBMC.Extract(%s,%s)"%(lib,self.profile))
                        time.sleep(.2)
                else:
                    print 'Not downloading MBox metadata, a recent file is cached'
            except: pass
            f = open(path)
            field=json.loads(f.read())
            for data in field:
                if data['active'] == '1':
                    if type == 'Movies':
                        name = str(data["title"].encode('utf-8'))+' ('+str(data["year"])+')'
                        if (re.search('(?i)'+encode,name)) and (year == str(data["year"])):
                            print 'found ' + name
                            returnList.append((name,custom_url+'api/serials/get_movie_data?id='+str(data["id"])))
                    else:
                        name = str(data["title"].encode('utf-8'))
                        if re.search('(?i)'+encode,name):
                            print 'found ' + name
                            returnList.append((name,data["id"]))
            return returnList
        except Exception, e:
            self.ErrorReport(e, 'Source website is down')
            return []
            
    def ErrorReport(self, e, msg):
        print str(e)
        elogo = xbmc.translatePath('special://home/addons/script.icechannel.extn.sjmbox/resources/redx.png')
        xbmc.executebuiltin("XBMC.Notification([COLOR=FFFF0000]MBox Error[/COLOR],"+msg+",10000,"+elogo+")")
        xbmc.log('***********MBox Error: '+str(e)+'**************', xbmc.LOGERROR)
