#!/usr/bin/env python3
import pycurl, curl
import sys
import os
import io  # .StringIO
import json
import time
import re  # always useful ;]
import shelve

class Settings:
    """
    Crawl configuration.

    If you are reading this, you should probably edit this class.
    """
    # XXX: LOOK HERE
    # Put an auth token in `user` if you use one; leave `password` empty
    # in that case.
    user = ''
    password = ''

    # Login the breadth-first crawl starts from.
    starting_user = 'andrewts'
    # Stop after this many users have been processed.
    node_num_limit = 100000

class GH_API:
    """
    GitHub API

    This class defines functions to access data on the githubs thru its API.
    It is used as a namespace: all functions are called through the class
    (e.g. ``GH_API.go(url)``) and requests go through the module-global
    ``g_curl`` handle created in the ``__main__`` block.
    """
    # TODO: redesign to allow multiple class instances

    # Credentials: taken from Settings when set there, otherwise from
    # argv[1] ("user:password") when run as a script.
    # BUGFIX: the old condition `Settings.user or __name__ == '__main__'
    # and len(sys.argv) >= 2` made a pre-set Settings.user *force* the
    # sys.argv[1] parse and crash with IndexError when no argument was given.
    if not Settings.user and __name__ == '__main__' and len(sys.argv) >= 2:
        [Settings.user, Settings.password] = sys.argv[1].split(':', maxsplit=1)
    if Settings.user:
        _url_prefix = 'https://{0}:{1}@api.github.com'.format(Settings.user, Settings.password)
    else:
        _url_prefix = 'https://api.github.com'

    def go(url):
        """
        Return de-JSON-ified python object with the given url.

        Retries up to 6 times with exponential back-off on bad HTTP status
        codes, returning None when the request keeps failing.  Also parses
        the X-RateLimit-* response headers and sleeps until the rate-limit
        window resets when (almost) no requests remain.
        """
        global g_curl
        g_curl.set_url(url)
        response = None      # stays None until a GET succeeds
        pycurl_poop = False  # True while the last attempt raised pycurl.error

        try:
            response = g_curl.get()
        except pycurl.error as e:
            print('exception in g_curl.get(): {0}'.format(e.args))
            pycurl_poop = True
        except Exception:
            pass  # best-effort: the retry loop below will try again

        req_tries = 1
        while g_curl.info()['http-code'] not in (200, 204, 304) and req_tries < 7:
            print('retrying, code {0} for url {1}'.format(g_curl.info()['http-code'], url))
            print('sleeping for {0} s'.format(2 ** (req_tries - 1)))
            time.sleep(2 ** (req_tries - 1))  # back-off: 1, 2, 4, 8, ... seconds
            try:
                response = g_curl.get()
                pycurl_poop = False
            except pycurl.error as e:
                print('exception in g_curl.get(): {0}'.format(e.args))
                pycurl_poop = True
            except Exception:
                time.sleep(1)
            req_tries += 1

        # Give up: retries exhausted, curl kept erroring, or no body at all
        # (previously an unbound `response` could raise NameError here).
        if req_tries >= 7 or pycurl_poop or response is None:
            return None
        headers = g_curl.header().split('\r\n')
        answer = json.loads(response)

        # Rate-limit bookkeeping from the response headers.  Initialized to
        # None so a missing header no longer explodes with NameError below.
        remaining_requests = None
        wake_up_time = None
        for header in headers:
            m = re.match(r'^X-RateLimit-Remaining: (\d+)$', header)
            if m:
                remaining_requests = int(m.group(1))
            m = re.match(r'^X-RateLimit-Reset: (\d+)$', header)
            if m:
                wake_up_time = int(m.group(1))

        if remaining_requests is not None and wake_up_time is not None:
            fmt = '{0} requests remaining until  {1}'.format(remaining_requests, time.asctime(time.localtime(wake_up_time)))
            print(fmt, file=sys.stderr)

        if remaining_requests is not None and wake_up_time is not None and remaining_requests <= 2:
            # max() guards against a reset timestamp already in the past,
            # which would make time.sleep() raise ValueError.
            nap_time = max(wake_up_time - time.time() + 1, 0)
            print('Rate limit exceeded, sleeping for {0} seconds'.format(nap_time))
            time.sleep(nap_time)
        return answer

    def user_dict(user):
        """
        Return the profile dict for *user*, or a stub marked type='DEAD'
        when the API call failed (account deleted, persistent errors, ...).
        """
        url = GH_API._url_prefix + '/users/' + user
        ans = GH_API.go(url)
        if ans is None:
            # user died before we got to know him. how sad :(
            return {'login': user, 'following': 0, 'followers': 0, 'type': 'DEAD'}
        return ans

    def _user_follow0_list(user, number, suffix='followers'):
        """
        Return the full (paginated) followers/following list for *user*.

        *number* is the expected total count (used to compute how many
        pages to fetch); *suffix* selects 'followers' or 'following'.
        Returns [] when the API answer is not a list.
        """
        url = GH_API._url_prefix + '/users/' + user + '/' + suffix
        follow0 = GH_API.go(url)
        # Check list-ness *before* touching the answer: go() may return
        # None or an error dict (previously a TypeError waiting to happen).
        if not isinstance(follow0, list):
            print('Unexpected behavior in GH_API.user_follow0_list: received object is not a list')
            return []
        if follow0:
            max_results_per_page = len(follow0)
            # Ceiling division; the old `number // per_page + 1` fetched a
            # wasted empty page whenever number was an exact multiple.
            tot_pages = -(-number // max_results_per_page)
            for page_i in range(2, tot_pages + 1):
                page = GH_API.go(url + '?page={0}'.format(page_i))
                if isinstance(page, list):  # skip failed pages instead of crashing
                    follow0 += page
        return follow0

    def user_followers_list(user, number):
        """Return the list of users following *user* (expected count *number*)."""
        return GH_API._user_follow0_list(user, number, 'followers')

    def user_following_list(user, number):
        """Return the list of users *user* follows (expected count *number*)."""
        return GH_API._user_follow0_list(user, number, 'following')

class GH_User:
    """
    This structure represents a github user.

    Profile fields from the API dict are copied onto the instance; the
    follower/following *counts* get a '_number' suffix so that the plain
    `followers` / `following` attributes can hold login lists.
    """

    # Profile fields copied verbatim from the API answer.
    Allowed_fields = { 'name', 'email', 'login', 'id', 'type', 'site_admin',
            'location', 'hireable', 'public_repos', 'followers', 'following',
            'created_at', 'updated_at', 'company', 'bio', 'disk_usage' }

    def __init__(self, *pargs, **kwargs):
        """
        Make a GH_User from a dictionary (returned by GH_API.go() function).

        Raises TypeError when the single positional dict argument is
        missing (previously this surfaced as a confusing NameError).
        """
        if len(pargs) == 1 and isinstance(pargs[0], dict):
            user_data = pargs[0]
        else:
            raise TypeError('GH_User() expects a single dict argument')
        for (attr, val) in user_data.items():
            if attr in GH_User.Allowed_fields:
                if attr in ('followers', 'following', 'public_repos'):
                    # counters get the _number suffix (see class docstring)
                    setattr(self, attr + '_number', val)
                else:
                    setattr(self, attr, val)
        self.followers = []  # follower logins, filled by fetch_neighbors()
        self.following = []  # followed logins, filled by fetch_neighbors()

    def fetch_neighbors(self):
        """
        Fetch this user's follower/following login lists via GH_API.

        Returns None on success (or for a DEAD account), 1 when 'login'
        is missing and 2 when 'type' is missing.
        """
        if getattr(self, 'login', None) is None:
            print("{0}: 'login' is null (API changed?)".format(self))
            return 1
            #raise Exception  # TODO: come up with exceptions for all the badness that could happen
        if getattr(self, 'type', None) is None:
            print("{0}: user has no 'type' field".format(self))
            return 2
        if getattr(self, 'type') == 'DEAD':
            print('{0}: user died while they were waiting in the queue'.format(self))
            return
        # getattr defaults guard against profiles that lack the counters
        # (previously an AttributeError).
        if getattr(self, 'followers_number', 0) > 0:
            L = GH_API.user_followers_list(self.login, self.followers_number)
            self.followers = [x['login'] for x in L]
        if getattr(self, 'following_number', 0) > 0:
            L = GH_API.user_following_list(self.login, self.following_number)
            self.following = [x['login'] for x in L]

    def __str__(self):
        return '<GH_User  ({2}) , followers={0} following={1}>'.format(getattr(self, 'followers_number', ''),
                getattr(self, 'following_number', ''), getattr(self, 'login', ''))

def walk_githubs(*args, **kwargs):
    """
    Breadth-first crawl of the github follower graph.

    Starts from Settings.starting_user, or resumes from the
    'github_soc_net_users_graph_resume' shelve file when the string
    'resume' appears among *args.  Quicksaves every 1000 nodes and writes
    the final graph to 'github_soc_net_users_graph' (appending '_' until
    the filename is free).  Returns 0 on success, 1 when the resume file
    is missing.
    """
    if 'resume' not in args:
        pool = {}  # where we store the graph ('login': <GH_user instance>)
        queue = []  # BFS frontier of logins still to process
        queued_nodes = set()  # mirrors queue for O(1) membership tests
        node_num = 1
        node_num_limit = Settings.node_num_limit
        log_msgs = []

        gh_user = GH_User(GH_API.user_dict(Settings.starting_user))
        gh_user.fetch_neighbors()
        pool[gh_user.login] = gh_user
        print(pool, file=sys.stderr)

        queued_nodes = set(gh_user.followers) | set(gh_user.following)
        queue = sorted(list(queued_nodes))  # because why not
        print(queue, file=sys.stderr)
    else:
        # read vars from the savefile
        if not os.access('github_soc_net_users_graph_resume', os.R_OK|os.W_OK):
            print('resume file not found')
            return 1
        shlv = shelve.open('github_soc_net_users_graph_resume')
        pool = shlv['pool']
        queue = shlv['queue']
        queued_nodes = shlv['queued_nodes']
        (node_num, node_num_limit) = shlv['nodes_counter']
        log_msgs = shlv['log_msgs']
        shlv.close()
        del shlv
        print('loaded pool (size={0}), queue (size={1}), queued_nodes (size={2}), '.format(len(pool), len(queue), len(queued_nodes)), end='')
        print('node_num={0}, node_num_limit={1}, log_msgs (size={2})'.format(node_num, node_num_limit, len(log_msgs)))
        print('resuming ...')
        time.sleep(1)

    while len(queue) > 0 and node_num < node_num_limit:
        node_num += 1
        user = queue.pop(0)
        queued_nodes.remove(user)
        print('processing user {0}'.format(user))
        gh_user = GH_User(GH_API.user_dict(user))
        gh_user.sequence_number = node_num
        if gh_user.login != user:
            print('very strange, gh_user.login != user ({0}, {1})'.format(gh_user, user))

        # Heuristic: accounts following >=1000 users with few followers
        # are treated as spam and their neighbors are not expanded.
        if gh_user.following_number >= 1000 and gh_user.followers_number <= 1500:
            fmt = '{0}: phony user detected, skipping its neighbors'.format(gh_user)
            print(fmt)
            log_msgs.append(fmt)
            gh_user.type = '-Phony user account'
        else:
            gh_user.fetch_neighbors()
            if gh_user.login in pool:
                print('User {0} is already in the pool somehow'.format(gh_user))
            else:
                pool[gh_user.login] = gh_user
                for neigh in gh_user.followers + gh_user.following:
                    if (neigh not in queued_nodes) and (neigh not in pool):
                        queued_nodes.add(neigh)
                        queue.append(neigh)
        print('queue length {0}, queued nodes {1}, pool size {2}'.format(len(queue), len(queued_nodes), len(pool)))

        if len(queue) > 1000000:
            fmt = 'queue length ({0}) exceeded 1M, shrinking to 500K (current user: {1}, nodes in pool: {2})'.format(len(queue), gh_user.login, node_num)
            print(fmt, file=sys.stderr)
            log_msgs.append(fmt)
            # BUGFIX: evict exactly the tail we cut.  The old code removed
            # queue[50000:] from queued_nodes while keeping queue[:500000],
            # desynchronizing the two and crashing queued_nodes.remove()
            # with KeyError on a later iteration.
            for victim in queue[500000:]:
                queued_nodes.remove(victim)
            queue = queue[:500000]
            if len(queue) != len(queued_nodes):
                fmt = 'queue shrunk with errors'
                print(fmt)
                log_msgs.append(fmt)

        # quicksave
        if (node_num % 1000) == 0:
            print('quicksave ...', end='')
            shlv = shelve.open('github_soc_net_users_graph_quicksave')
            shlv['pool'] = pool
            shlv['queue'] = queue
            shlv['queued_nodes'] = queued_nodes
            shlv['nodes_counter'] = (node_num, node_num_limit)
            shlv['log_msgs'] = log_msgs
            time.sleep(0.5)
            shlv.close()
            print(' complete')
            time.sleep(0.2)
            del shlv

    # Final save: never overwrite an existing graph file.
    filename = 'github_soc_net_users_graph'
    while os.access(filename, os.F_OK, follow_symlinks=False):
        filename += '_'
    shlv = shelve.open(filename)
    shlv['pool'] = pool
    shlv['log_msgs'] = log_msgs
    shlv['queued_nodes'] = queued_nodes
    shlv.close()
    print('{0} nodes downloaded and saved to \"{1}\"'.format(len(pool), filename))
    return 0

def main(args=sys.argv):
    """
    Entry point: report the pycurl build, then start the crawl.

    *args* is forwarded to walk_githubs() (so passing 'resume' on the
    command line resumes a saved crawl); returns its exit status.
    """
    print('pycurl.version:', pycurl.version)
    status = walk_githubs(*args)
    return status

if __name__ == "__main__":
    # Module-global curl handle used by GH_API.go(); configured once here:
    # ask for JSON, keep the connection alive, and never follow redirects.
    g_curl = curl.Curl()
    g_curl.set_option(pycurl.HTTPHEADER, ['Accept: application/json',
                                          'Connection: keep-alive'])
    g_curl.set_option(pycurl.FOLLOWLOCATION, 0)
    main()
    # Release the curl handle when the crawl finishes.
    g_curl.close()
