'''
GitHub crawler: scrapes user profiles, repositories, followers and
organizations into MySQL, coordinating the work queues through Redis.

Created on 2016-08-17

@author: mengwei
'''

import urllib.request
from bs4 import BeautifulSoup
import mysql.connector
import time
import redis
import threading
import traceback

def _optional_text(tag):
    """Return ``tag.get_text()``, or '' when the element was not found."""
    return tag.get_text() if tag else ""


def _parse_stat_count(text):
    """Convert a GitHub vcard counter string to a number.

    Plain values ('87') come back as int; values abbreviated with a
    trailing 'k' ('1.2k') come back as float scaled by 1000 (1200.0),
    matching how GitHub rounds large counters.
    """
    if text.find('k') > 0:
        return float(text.replace('k', '')) * 1000
    return int(text)


def getMainInfo(userHome, headers):
    """Fetch and parse a GitHub user profile page.

    userHome -- profile URL, e.g. ``https://github.com/<login>``
    headers  -- HTTP header dict (User-Agent) used for the request

    Returns a dict with name, full_name, email, bio, url, company,
    location, join_time (a ``time.struct_time``), and the
    followers/starred/following counters.  When the profile shows
    organization avatar badges, an 'organizations' list of their
    aria-labels is added as well.

    Raises on network errors, or when required page elements
    (vcard-username, join-date, the three stat counters) are missing.
    """
    request = urllib.request.Request(userHome, headers=headers)
    plain_text = urllib.request.urlopen(request).read().decode("utf-8")
    soup = BeautifulSoup(plain_text, "html.parser")
    # The full name is optional on GitHub profiles; fall back to ''
    # instead of crashing on profiles that only have a login.
    full_name = _optional_text(soup.find('div', {'class': 'vcard-fullname'}))
    user_name = soup.find('div', {'class': 'vcard-username'}).get_text()
    organization = _optional_text(soup.find('li', {'aria-label': 'Organization'}))
    location = _optional_text(soup.find('li', {'aria-label': 'Home location'}))
    bio = _optional_text(soup.find('div', {'class': 'user-profile-bio'}))
    email = _optional_text(soup.find('li', {'aria-label': 'Email'}))
    url = _optional_text(soup.find('li', {'aria-label': 'Blog or website'}))
    join_time = soup.find('local-time', {'class': 'join-date'})['datetime']
    # The three vcard counters appear in fixed order: followers, starred, following.
    info = soup.find_all('strong', {'class': 'vcard-stat-count d-block'})
    user = {
        'name': user_name,
        'full_name': full_name,
        'email': email,
        'bio': bio,
        'url': url,
        'company': organization,
        'location': location,
        'join_time': time.strptime(join_time, '%Y-%m-%dT%H:%M:%SZ'),
        'followers': _parse_stat_count(info[0].get_text()),
        'starred': _parse_stat_count(info[1].get_text()),
        'following': _parse_stat_count(info[2].get_text()),
    }
    orgTags = soup.find_all('a', {'class': 'tooltipped tooltipped-n avatar-group-item'})
    if orgTags:
        user['organizations'] = [tag['aria-label'] for tag in orgTags]
    return user

def getStarsAndForks(repositories, headers, user):
    """Sum stars and forks over a user's public source repositories.

    repositories -- URL of the repositories listing page
    headers      -- HTTP header dict (User-Agent) for the request
    user         -- dict mutated in place: sets user['stars'] and user['forks']
    """
    request = urllib.request.Request(repositories, headers=headers)
    html = urllib.request.urlopen(request).read().decode("utf-8")
    page = BeautifulSoup(html, "html.parser")
    total_stars = 0
    total_forks = 0
    for item in page.find_all('div', {'class': 'repo-list-item public source'}):
        # The stats block holds two anchors: stars first, forks second.
        links = item.find('div', {'class': 'repo-list-stats'}).find_all('a')
        total_stars += int(links[0].get_text().strip('\n').strip().replace(',', ''))
        total_forks += int(links[1].get_text().strip('\n').strip().replace(',', ''))
    user['stars'] = total_stars
    user['forks'] = total_forks
    
def getUserRepositories(repositoryUrl, headers, user):
    """Download a user's repositories tab and parse it with getRepositories.

    Returns the list of repository dicts; also mutates *user* (stars/forks
    totals) via getRepositories.
    """
    request = urllib.request.Request(repositoryUrl, headers=headers)
    html = urllib.request.urlopen(request).read().decode("utf-8")
    return getRepositories(BeautifulSoup(html, "html.parser"), user)

def _clean(text):
    """Normalise scraped text: drop surrounding whitespace and commas."""
    return text.strip('\n').strip().replace(',', '')


def getRepositories(soup, user):
    """Parse the repository list items out of a parsed listing page.

    soup -- BeautifulSoup of a user's repositories tab or an org home page
    user -- the owner dict; a user dict (has a 'followers' key) fills each
            repository's 'user_name', otherwise 'org_name' is filled.
            Mutated in place: user['stars'] and user['forks'] receive the
            totals over all listed repositories.

    Returns a list of repository dicts with keys: name, user_name,
    org_name, description, updated (struct_time), stars, forks, language,
    plus 'forked'/'fork_from' for repositories that are forks.  Forked
    repositories keep forks == 0 and are excluded from the fork total.
    """
    repoList = soup.find_all('div', {'class': 'repo-list-item public source'})
    stars = 0
    forks = 0
    repositories = []
    for repo in repoList:
        name = _clean(repo.find('a', {'itemprop': 'name codeRepository'}).get_text())
        # description stays None when the repository has no description.
        description = repo.find('p', {'itemprop': 'description'})
        if description:
            description = _clean(description.get_text())
        updated = repo.find('relative-time')['datetime']
        # Stats block: anchor 0 is the star count, anchor 1 the fork count.
        aTag = repo.find('div', {'class': 'repo-list-stats'}).find_all('a')
        star = int(_clean(aTag[0].get_text()))
        stars += star
        repository = {
            'name': name,
            'user_name': '',
            'org_name': '',
            'description': description,
            'updated': time.strptime(updated, '%Y-%m-%dT%H:%M:%SZ'),
            'stars': star,
            'forks': 0,
        }
        # A user dict carries 'followers'; an organization dict does not.
        if 'followers' in user:
            repository['user_name'] = user['name']
        else:
            repository['org_name'] = user['name']
        forked = repo.find('p', {'class': 'repo-list-info'})
        if forked:
            # Fork: record its upstream; its own fork count is not tallied.
            repository['fork_from'] = forked.find('a').get_text()
            repository['forked'] = True
        else:
            fork = int(_clean(aTag[1].get_text()))
            repository['forks'] = fork
            forks += fork
        lang = repo.find('span', {'itemprop': 'programmingLanguage'})
        repository['language'] = _clean(lang.get_text()) if lang else ''
        repositories.append(repository)
    user['stars'] = stars
    user['forks'] = forks
    return repositories
    
def getFollowers(followersUrl, headers, page=0):
    """Collect the login names of every follower listed at *followersUrl*.

    followersUrl -- the /followers URL of a profile
    headers      -- HTTP header dict (User-Agent) for the request
    page         -- 0 for the first call; the first call reads the total
                    counter and recursively fetches the remaining pages.

    Returns a flat list of login strings.
    NOTE(review): pagination assumes 51 followers per page — confirm
    against GitHub's current layout.
    """
    request = urllib.request.Request(followersUrl, headers=headers)
    html = urllib.request.urlopen(request).read().decode("utf-8")
    page_soup = BeautifulSoup(html, "html.parser")
    names = []
    for span in page_soup.find_all('span', {'class': 'css-truncate css-truncate-target'}):
        link = span.find('a')
        if link:
            # href is '/<login>'; strip the slashes to get the bare login.
            names.append(link.get('href').replace('/', ''))
    if page == 0:
        counter = page_soup.find('span', {'class': 'counter'})
        total = int(counter.get_text().strip('\n').strip().replace(',', ''))
        for idx in range(2, 2 + total // 51):
            names.extend(getFollowers(followersUrl + "?page=" + str(idx), headers, page=idx))
    return names

def saveUser(user):
    """Insert one scraped user dict into the MySQL `user` table.

    user -- dict produced by getMainInfo (plus stars/forks from
            getRepositories); its keys match the named placeholders.

    Connection errors are reported and swallowed (best-effort persistence);
    cursor and connection are always closed.
    """
    config = {
        'user': 'root',
        'password': 'root', 
        'host': '127.0.0.1',
        'database': 'github-spider'
    }
    add_user = ("INSERT INTO user (name, full_name, email, bio, url, company, location, join_time, followers, starred, following, stars, forks) VALUES "
                "(%(name)s, %(full_name)s, %(email)s, %(bio)s, %(url)s, %(company)s, %(location)s, %(join_time)s, %(followers)s, %(starred)s, %(following)s, %(stars)s, %(forks)s)")
    cnx = cur = None
    try:
        cnx = mysql.connector.connect(**config)
    except mysql.connector.Error as err:
        # Bug fix: the old `err + user.name` raised (err is an Error object
        # and user is a dict) instead of reporting the failure.
        print("%s %s" % (err, user['name']))
    else:
        cur = cnx.cursor()
        cur.execute(add_user, user)
        cnx.commit()
    finally:
        if cur:
            cur.close()
        if cnx:
            cnx.close()
    

def saveUserOrgsRelation(user, organizations, redis):
    """Persist user→organization memberships and queue orgs for scraping.

    user          -- user dict (only user['name'] is read)
    organizations -- list of organization names, or None to do nothing
    redis         -- a connected redis client; each org is added to the
                     'githubspider-toScanOrganizations' work queue

    Connection errors are reported and swallowed; cursor and connection
    are always closed.
    """
    if organizations is None:
        return
    config = {
        'user': 'root',
        'password': 'root', 
        'host': '127.0.0.1',
        'database': 'github-spider'
    }
    add_rel = ("INSERT INTO user_org_rel (user_name, org_name) VALUES (%s, %s)")
    cnx = cur = None
    try:
        cnx = mysql.connector.connect(**config)
    except mysql.connector.Error as err:
        # Bug fix: the old `err + user.name` raised (err is an Error object
        # and user is a dict) instead of reporting the failure.
        print("%s %s" % (err, user['name']))
    else:
        cur = cnx.cursor()
        for org in organizations:
            redis.sadd('githubspider-toScanOrganizations', org)
            cur.execute(add_rel, (user['name'], org))
        cnx.commit()
    finally:
        if cur:
            cur.close()
        if cnx:
            cnx.close()
    

def saveRepositories(repositories):
    """Insert scraped repository dicts into the MySQL `repository` table.

    repositories -- list of dicts from getRepositories, or None to do
                    nothing.  Dicts that carry a 'forked' key (and thus
                    'fork_from') use the statement with the fork columns;
                    source repositories use the shorter one.

    Connection errors are reported and swallowed; cursor and connection
    are always closed.
    """
    if repositories is None:
        return
    config = {
        'user': 'root',
        'password': 'root', 
        'host': '127.0.0.1',
        'database': 'github-spider'
    }
    # Fix: the two statements were named the wrong way around (the one
    # *without* fork columns was called add_forked_repository); behavior
    # is unchanged, only the local names now say what they insert.
    insert_forked = ("INSERT INTO repository (name,  user_name, org_name, description, updated, language, stars, forks, forked, fork_from) VALUES" 
                    "(%(name)s, %(user_name)s, %(org_name)s, %(description)s, %(updated)s, %(language)s, %(stars)s, %(forks)s, %(forked)s, %(fork_from)s)")
    insert_source = ("INSERT INTO repository (name, user_name, org_name, description, updated, language, stars, forks) VALUES" 
                    "(%(name)s, %(user_name)s, %(org_name)s, %(description)s, %(updated)s, %(language)s, %(stars)s, %(forks)s)")
    
    cnx = cur = None
    try:
        cnx = mysql.connector.connect(**config)
    except mysql.connector.Error as err:
        print(err)
    else:
        cur = cnx.cursor()
        for repository in repositories:
            if 'forked' in repository:
                cur.execute(insert_forked, repository)
            else:
                cur.execute(insert_source, repository)
        cnx.commit()
    finally:
        if cur:
            cur.close()
        if cnx:
            cnx.close()

def startSpider():
    """Worker loop: pop user logins from Redis and scrape each profile.

    Redis sets used:
      githubspider-toScanUsers          -- queue of logins awaiting a crawl
      githubspider-scannedUsers         -- logins already crawled
      githubspider-scanningUsers        -- in-flight lock shared by workers
      githubspider-toScanOrganizations  -- org queue fed via saveUserOrgsRelation

    Followers of each crawled user are pushed back onto the to-scan queue.
    The loop exits once the to-scan queue is empty.
    """
    user_agent = 'Mozilla/4.0 (compatible; MSIE 5.5; Windows NT)'
    headers = { 'User-Agent' : user_agent }
    redisConfig = {
        'host': '192.168.1.104',
        'port': 6379,
        'password':123
    }
    r = redis.StrictRedis(**redisConfig)
    while True:
        raw = r.spop("githubspider-toScanUsers")
        # Bug fix: spop returns None when the set is empty; the old code
        # called .decode() unconditionally and crashed instead of stopping.
        if raw is None:
            break
        name = raw.decode()
        if r.sismember("githubspider-scannedUsers", name):
            print("已经爬取过该用户：%s"%name)
            continue
        # sadd returns 0 when the member already existed -> another worker
        # holds the lock on this login.
        if r.sadd("githubspider-scanningUsers", name) == 0:
            print("其它线程正在抓取该用户：%s"%name)
            continue
        print(name)
        userHome = "https://github.com/" + name
        repositoryUrl = userHome + "?tab=repositories"
        followersUrl = userHome + "/followers"
        user = None  # defined before try so the except branch can print it
        try:
            user = getMainInfo(userHome, headers)
            organizations = user.pop('organizations', None)
            repositories = getUserRepositories(repositoryUrl, headers, user)
            saveUser(user)
            saveRepositories(repositories)
            saveUserOrgsRelation(user, organizations, r)
            r.sadd("githubspider-scannedUsers", name)
            for follower in getFollowers(followersUrl, headers):
                if not r.sismember("githubspider-scannedUsers", follower):
                    r.sadd("githubspider-toScanUsers", follower)
        except Exception:
            print(traceback.format_exc() + " %s"%name)
            print(user)
        finally:
            # Release the per-user lock whether the crawl succeeded or not.
            r.srem("githubspider-scanningUsers", name)

def init():
    """One-off bootstrap: sync the Redis bookkeeping sets with MySQL.

    Marks every login already present in the `user` table as scanned,
    clears the work/lock queues, and seeds the to-scan queue with a few
    starting accounts.  Run once before starting the worker threads.
    """
    config = {
        'user': 'root',
        'password': 'root', 
        'host': '127.0.0.1',
        'database': 'github-spider'
    }
    redisConfig = {
        'host': '192.168.1.104',
        'port': 6379,
        'password':123
    }
    r = redis.StrictRedis(**redisConfig)
    select_user = ("select name from user")
    cnx = cur = None
    try:
        cnx = mysql.connector.connect(**config)
    except mysql.connector.Error as err:
        print(err)
    else:
        cur = cnx.cursor()
        cur.execute(select_user)
        data = cur.fetchall()
        r.delete("githubspider-scanningUsers")
        r.delete("githubspider-scannedUsers")
        r.delete("githubspider-toScanUsers")
        # Bug fix: fetchall() yields 1-tuples; redis-py cannot serialize a
        # tuple, so the login string must be unpacked before sadd.
        for (name,) in data:
            r.sadd("githubspider-scannedUsers", name)
        r.sadd("githubspider-toScanUsers", 'willin')
        r.sadd("githubspider-toScanUsers", 'mzupan')
        r.sadd("githubspider-toScanUsers", 'matthewfitz')
    finally:
        if cur:
            cur.close()
        if cnx:
            cnx.close()


def scrapyOrganizations():
    """Worker loop: pop organization names from Redis and scrape them.

    Polls the 'githubspider-toScanOrganizations' set every 5 seconds (it
    is fed by saveUserOrgsRelation), scrapes each org's home page, stores
    the organization row and its repositories in MySQL, and records
    progress in 'githubspider-scannedOrgs'.  Runs forever.
    """
    user_agent = 'Mozilla/4.0 (compatible; MSIE 5.5; Windows NT)'
    headers = { 'User-Agent' : user_agent }
    config = {
        'user': 'root',
        'password': 'root', 
        'host': '127.0.0.1',
        'database': 'github-spider'
    }
    redisConfig = {
        'host': '192.168.1.104',
        'port': 6379,
        'password':123
    }
    r = redis.StrictRedis(**redisConfig)
    while True:
        name = r.spop("githubspider-toScanOrganizations")
        if name is None:
            time.sleep(5)
            continue
        name = name.decode()
        if r.sismember("githubspider-scannedOrgs", name):
            print("已经爬取过该组织：%s"%name)
            continue
        # NOTE(review): orgs share the 'scanningUsers' lock set with user
        # workers; a user and an org with the same name would collide —
        # confirm this is intended.
        if r.sadd("githubspider-scanningUsers", name) == 0:
            print("其它线程正在抓取该组织：%s"%name)
            continue
        print(name)
        orgHome = "https://github.com/" + name
        try:
            request = urllib.request.Request(orgHome, headers = headers)
            plain_text = urllib.request.urlopen(request).read().decode("utf-8")
            soup = BeautifulSoup(plain_text, "html.parser")
            description = soup.find('h1', {'class':'org-name lh-condensed'}).get_text().strip('\n').strip()
            locationTag = soup.find('span', {'itemprop':'location'})
            location = locationTag.get_text() if locationTag else ""
            urlTag = soup.find('a', {'itemprop':'url'})
            url = urlTag.get_text() if urlTag else ""
            people = soup.find('span', {'class':'counter'}).get_text()
            add_organization = ("insert into organization(name, description, location, url, people, stars, forks) values"
                                "(%(name)s,%(description)s,%(location)s,%(url)s,%(people)s, %(stars)s, %(forks)s)")
            organization = {
                'name':name, 
                'description':description, 
                'location':location,
                'url':url, 
                'people':people
            }
            # getRepositories also fills organization['stars'/'forks'],
            # which the INSERT above depends on.
            repositories = getRepositories(soup, organization)
            saveRepositories(repositories)
            cnx = cur = None
            try:
                cnx = mysql.connector.connect(**config)
            except mysql.connector.Error as err:
                print(err)
            else:
                cur = cnx.cursor()
                cur.execute(add_organization, organization)
                cnx.commit()
            finally:
                if cur:
                    cur.close()
                if cnx:
                    cnx.close()
            r.sadd("githubspider-scannedOrgs", name)
        except Exception:
            print(traceback.format_exc() + " %s"%name)
        finally:
            # Bug fix: release the lock even when scraping failed; before,
            # a failed org stayed in scanningUsers forever and was never
            # retried by any worker.
            r.srem("githubspider-scanningUsers", name)


if __name__ == '__main__':
    # init()  # one-off: seed the Redis queues from MySQL before the first run
    # Three user-crawling workers and five organization-crawling workers.
    workers = [threading.Thread(target=startSpider) for _ in range(3)]
    workers.extend(threading.Thread(target=scrapyOrganizations) for _ in range(5))
    for worker in workers:
        worker.start()
