# -*- coding: utf-8 -*-
import urllib2
import json
from urllib import urlencode


def request_url(url):
    '''
        Get data from a given URL. If connection failures occur, try 3 times at most.
        Input:  the request string of url
        Output: the result string (for http request, return the html code)
        by Yan WANG
    '''
    fails = 0
    content = ''
    while fails < 3:
        try:
            req = urllib2.Request(url)
            resp = urllib2.urlopen(req, timeout = 10)
            content = resp.read()
            if len(content) > 1:
                break
            else:
                fails += 1
        except Exception as e:
            fails += 1
            print 'Failed for %d time(s)' % fails, e
    return content

def search(search_type, param_dict, root=None):
    '''
        Submit a query to the service API and return the parsed result.

        Input:  search_type: follows the given API, could be 'search-expert'
                    or else.
                param_dict: a dict of query parameters, e.g.:
                    {'u':'clockwise', 'q':'data mining', 'start': 10, 'num':3}
                root: optional service root URL; defaults to the Arnetminer
                    service. A trailing '/' is appended if missing.
        Output: the dict of results, parsed from the returned JSON object
        by Yan WANG
    '''
    # 'is None' is the correct identity test (PEP 8); '== None' can
    # misbehave with objects that override __eq__.
    if root is None:
        root = 'http://arnetminer.org/services/'
    if not root.endswith('/'):
        root += '/'
    # Build the full request URL in a separate variable instead of
    # mutating the 'root' parameter further.
    url = root + search_type + '?' + urlencode(param_dict)
    return json.loads(request_url(url))
    

if __name__ == '__main__':
    search_type = 'search-expert'
    param_dict = {'u':'clockwise', 'q':'data mining', 'start':10, 'num':3}
    
    print json.dumps(search(search_type, param_dict), sort_keys = True, indent = 2)
    
