#coding:utf-8
"""
*   TG_Coder : Dzlua
*   Email    : 505544956@qq.com
*   Time     : 2017/08/23
"""

from bs4 import BeautifulSoup

from spider import Spider
from database import DataBase

import codecs
import re
import urlparse

#-----------------------
class LibStar(Spider, DataBase):
    """Collect star counts for library repositories.

    Reads ``href`` values from the ``libinfo`` table, classifies each URL
    (direct archive, oschina, github, code4app), fetches the project page
    where needed, and inserts one ``(name, type, href, star)`` row per
    library into the ``libstar`` table.
    """
    def __init__(self, host, user, pwd, db, threads_work = 4, tasks = 0):
        """Connect to MySQL, ensure the result table exists and start crawling.

        host/user/pwd/db   -- MySQL connection parameters (passed to DataBase)
        threads_work/tasks -- worker-thread count and task-queue size (Spider)
        """
        # Name of the result table; also used as the log-file basename.
        self.__tb = 'libstar'
        DataBase.__init__(self, host, user, pwd, db)
        sql = 'CREATE TABLE IF NOT EXISTS ' + self.__tb + \
            ' (id INTEGER AUTO_INCREMENT,name TEXT,type TEXT,href TEXT,star INT,date TIMESTAMP, PRIMARY KEY(id) ) \
            DEFAULT CHARSET=utf8;'
        DataBase.execute(self, sql)

        Spider.__init__(self, threads_work, tasks)

        # Known hosts: repo 'type' stored in the DB and which page handler
        # ('onpage' code, see __on_read dispatch) parses that site.
        self.__hosts = {
            'git.oschina.net': {'type':'git','onpage':1},
            'github.com': {'type':'git','onpage':2},
            'code4app.com': {'type':'zip','onpage':3}
        }

        # Kick off reading source rows; Spider is expected to invoke the
        # callback with an open DB connection.
        self.read(self.__on_read)

    def _select(self, conn, sql, params=None):
        """Run a SELECT on `conn` and return all rows.

        `params`, when given, is passed to cursor.execute() so the driver
        performs proper value escaping (DB-API parameterized query).
        The cursor is always closed, even if execute() raises.
        """
        cursor = conn.cursor()
        try:
            if params is None:
                cursor.execute(sql)
            else:
                cursor.execute(sql, params)
            return cursor.fetchall()
        finally:
            cursor.close()

    def _check_url(self, info):
        """Classify info['name'] (a URL) and fill in type/href/star.

        Returns:
            0  -- direct archive link (.zip / .tar.gz), ready to save;
            >0 -- 'onpage' handler code for a known host (needs a fetch);
            -1 -- unknown host, skip.
        Side effect: sets info['url'] to the (possibly cleaned) URL.
        """
        url = info['name']
        info['url'] = url

        # Direct .zip download: no page fetch needed, star count unknown.
        if url[url.rfind('.') : len(url)] == u'.zip':
            info['type'] = 'zip'
            info['href'] = info['name']
            info['star'] = 0
            return 0
        # Direct .tar.gz download (two-part extension, hence the nested rfind).
        if url[url.rfind('.', 0, url.rfind('.')) : len(url)] == u'.tar.gz':
            info['type'] = 'tar.gz'
            info['href'] = info['name']
            info['star'] = 0
            return 0
        # Normalize release/download sub-pages back to the project root page.
        url = re.sub(r'/releases', '', url)
        url = re.sub(r'/downloads', '', url)

        info['url'] = url
        up = urlparse.urlparse(url)
        for k in self.__hosts:
            if up.netloc == k:
                info['type'] = self.__hosts[k]['type']
                return self.__hosts[k]['onpage']
        return -1

    def __on_read(self, conn, args):
        """read() callback: walk libinfo rows and enqueue page fetches."""
        sql = "select id,href from libinfo;"
        result = self._select(conn, sql)
        length = len(result)
        for r in result:
            print('[LibStar] id-> %s/%s, href-> %s' % (r[0], length, r[1]))
            # Parameterized query: hrefs may contain quotes, which would
            # break (or inject into) a string-formatted WHERE clause.
            existing = self._select(
                conn,
                "select id from " + self.__tb + " where name=%s",
                (r[1],))
            if existing:
                # Already collected on a previous run.
                continue

            info = {'id': r[0], 'name': r[1]}
            onpage = self._check_url(info)
            if onpage == 0:
                # Direct archive: save immediately, no page fetch.
                self.save(self.__on_save, info)
            elif onpage == 1:
                self.put(self.__on_page_oschina, info['url'], None, info)
            elif onpage == 2:
                self.put(self.__on_page_github, info['url'], None, info)
            elif onpage == 3:
                self.put(self.__on_page_code4app, info['url'], None, info)

    def __on_save(self, conn, data, args):
        """save() callback: insert one collected row into the result table."""
        try:
            cursor = conn.cursor()
            try:
                sql = "INSERT INTO " + self.__tb + " (name,type,href,star) VALUES(%s,%s,%s,%s);"
                cursor.execute(sql, (data['name'], data['type'], data['href'], data['star']) )
            finally:
                # Close the cursor even when execute() raises.
                cursor.close()
            conn.commit()
        except conn.Error as e:
            err = '[LibStar] Error __on_save : %d %s, %s ' % (e.args[0], e.args[1], data)
            self.write(err + '\n')
            print(err)
        else:
            # Report success only when the insert actually committed.
            print('[LibStar] save id: %d, name: %s, data:%s' % (data['id'], data['name'], data))

    def __on_page_oschina(self, ret, url, args):
        """Parse an oschina project page: extract star count and clone URL."""
        if not ret:
            print('[LibStar] %s: no result.' % (url))
            return
        soup = BeautifulSoup(ret, "lxml")

        # Star count lives in the title attribute of the social-count link.
        star_con = soup.find('span', class_ = ['star-container'])
        if not star_con:
            print('[LibStar] %s: no .star-container.' % (url))
            return
        star = star_con.find('a', class_ = ['social-count'])
        if not star:
            print('[LibStar] %s: no .social-count.' % (url))
            return
        # Drop thousands separators ("1,234" -> "1234").
        args['star'] = re.sub(r',','', self.strip(star.get('title')))

        href = soup.find('input', id = 'project_clone_url')
        if href:
            args['href'] = href.get('value')
            self.save(self.__on_save, args)

    def __on_page_github(self, ret, url, args):
        """Parse a GitHub project page: extract star count and clone URL."""
        if not ret:
            print('[LibStar] %s: no result.' % (url))
            return
        soup = BeautifulSoup(ret, "lxml")

        star = soup.find('a', class_ = ['js-social-count'])
        if not star:
            print('[LibStar] %s: no .social-count .js-social-count.' % (url))
            return
        # Drop thousands separators ("1,234" -> "1234").
        args['star'] = re.sub(r',','', self.strip(star.text))

        href = soup.find('input', class_ = ['input-monospace'])
        if href:
            args['href'] = href.get('value')
            self.save(self.__on_save, args)

    def __on_page_code4app(self, ret, url, args):
        """Parse a code4app page: recover code/code_id/fav_num from inline JS.

        The page embeds `var code = ...; var code_id = ...; var fav_num = ...;`
        in a <script> block; we strip quotes/whitespace and split on ';'.
        """
        if not ret:
            print('[LibStar] %s: no result.' % (url))
            return

        data = {'code':None,'code_id':None,'fav_num':None}
        jss = re.findall(r'<script type="text/javascript">\n([\s\S]+?)</script>', ret, re.M)
        for js in jss:
            if js.find('code_id') == -1:
                continue
            # strip() removes whitespace, so 'var code = x' becomes 'varcode=x'.
            js = self.strip(js)
            js = re.sub(r'\'', '', js)
            js = re.sub(r'"', '', js)
            vs = js.split(';')
            for v in vs:
                if not data['code']:
                    data['code'] = re.sub(r'varcode=', '', v)
                    continue
                if not data['code_id']:
                    data['code_id'] = re.sub(r'varcode_id=', '', v)
                    continue
                if not data['fav_num']:
                    data['fav_num'] = re.sub(r'varfav_num=', '', v)
                    continue
            break

        # Guard: if the expected script block was not found, code/code_id stay
        # None and the concatenation below would raise TypeError.
        if not data['code'] or not data['code_id']:
            print('[LibStar] %s: failed to parse code/code_id.' % (url))
            return

        args['href'] = 'http://www.code4app.com/down/' + data['code'] + '/?id=' + data['code_id']
        # fav_num may legitimately be absent; treat it as zero stars.
        if data['fav_num']:
            args['star'] = re.sub(r',','', self.strip(data['fav_num']))
        else:
            args['star'] = 0

        self.save(self.__on_save, args)

    def write(self, text):
        """Append `text` to the '<table>.log' file (UTF-8 encoded)."""
        f = codecs.open(self.__tb + '.log', 'a', 'utf-8')
        try:
            f.write(text)
        finally:
            f.close()
#-----------------------
# Run the crawl only when executed as a script, not on import: constructing
# LibStar connects to MySQL and starts worker threads as a side effect.
if __name__ == '__main__':
    libstar = LibStar('localhost', 'dzlua', 'dzlua', 'spider', 10, 100)
    libstar.wait()
    libstar.close()