#!/usr/bin/env python

# coding: utf-8
from HTMLParser import HTMLParser
import urllib2
import urllib
import json
import time
import sys
import unicodedata
from rdflib.graph import Graph
from rdflib import Literal, Namespace, URIRef, BNode
from rdflib import RDF, RDFS

def removeInvalidChars(to_translate, translate_to=u''):
    """Strip characters that are not legal in URIs (angle brackets, quotes,
    braces, pipe, backslash, backtick, caret, parens) from *to_translate*.

    Each offending character is replaced with *translate_to* (default: drop).
    """
    invalid = u'(<>"{}|\`^)'
    mapping = {ord(ch): translate_to for ch in invalid}
    return to_translate.translate(mapping)

def uritranslate(to_translate, translate_to=u''):
    """Remove essentially all ASCII punctuation from *to_translate* so the
    remainder can be used as a URI path segment.

    Each punctuation character is replaced with *translate_to* (default: drop).
    Whitespace and alphanumerics are left untouched.
    """
    punctuation = u'!"#%\'()*+,-./:;<=>?@[\]^_`{|}~'
    mapping = {ord(ch): translate_to for ch in punctuation}
    return to_translate.translate(mapping)

def camelCase(s, sep=' '):
    """Turn a free-form (possibly accented) name into a CamelCase ASCII token
    suitable for a URI local name, e.g. u'school of science' -> 'SchoolOfScience'.

    NOTE: under Python 2 the .encode() below yields a byte string, so the
    returned value is a byte string as well.
    """
    # Drop URI-hostile punctuation first, then fold accents to plain ASCII.
    cleaned = uritranslate(s)
    ascii_form = unicodedata.normalize('NFKD', cleaned).encode('ASCII', 'ignore')
    # Title-case every sep-delimited word and glue them together.
    return ''.join(word.title() for word in ascii_form.split(sep))

def is_valid_url(url):
    """Return True when *url* looks like a syntactically valid http(s)/ftp(s)
    URL (domain, localhost, IPv4, or IPv6 host; optional port and path).

    Returns False for None and for anything the pattern rejects.
    """
    import re
    regex = re.compile(
        r'^(?:http|ftp)s?://'  # http:// or https://
        r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|'  # domain...
        r'localhost|'  # localhost...
        r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}|'  # ...or ipv4
        r'\[?[A-F0-9]*:[A-F0-9:]+\]?)'  # ...or ipv6
        r'(?::\d+)?'  # optional port
        r'(?:/?|[/?]\S+)$', re.IGNORECASE)
    # Previously this returned the raw match object (or False/None); make the
    # return value an honest bool while keeping the same truthiness for callers.
    return url is not None and regex.search(url) is not None

# HTML Parser instance, used only for unescape() to decode HTML entities
# (&amp; etc.) in strings coming back from the API.
p = HTMLParser()
# The API key is passed as the first command-line argument.
APIID = sys.argv[1]

print >>sys.stderr, "Running peoplespider.py with " + APIID

# First request: list every person visible through the API ("namelike=%").
req = urllib2.Request("https://people.aalto.fi/api/api.php?namelike=%&LANG=eng&APIID="+APIID)
opener = urllib2.build_opener()
response = opener.open(req)
people = json.load(response)

# Collect all person ids from the first request; one follow-up call is made
# per id (twice: once for English, once for Finnish data).
ids = [item['id'] for item in people]

PPL = Namespace("http://data.aalto.fi/id/people/")
FAC = Namespace("http://data.aalto.fi/id/people/organization/")
ORG = Namespace("http://www.w3.org/ns/org#")
XSD = Namespace("http://www.w3.org/2001/XMLSchema#")
FOAF = Namespace("http://xmlns.com/foaf/0.1/")
SCHEMA = Namespace("http://schema.org/")
AIISO = Namespace("http://purl.org/vocab/aiiso/schema#")
APL = Namespace("https://people.aalto.fi/")
APF = Namespace("https://people.aalto.fi/files/")
DCT = Namespace("http://purl.org/dc/terms/")
SKOS = Namespace('http://www.w3.org/2004/02/skos/core#')
BIBO = Namespace('http://purl.org/ontology/bibo/')
PRISM = Namespace('http://prismstandard.org/namespaces/basic/2.1/')

graph = Graph()

# Root organization node; schools/departments are attached below it.
graph.add((FAC["AaltoUniversity"],RDF.type,AIISO["College"]))
graph.add((FAC["AaltoUniversity"],RDFS.label,Literal("Aalto University",lang="en")))
# BUG FIX: the Finnish label was attached to a misspelled URI
# (FAC["AaltoUniveristy"]), creating an orphan node instead of labeling
# the university resource created above.
graph.add((FAC["AaltoUniversity"],RDFS.label,Literal("Aalto Yliopisto",lang="fi")))

print >>sys.stderr, "Number of people/api calls "+str(len(ids))+"\n"

for i in ids:
    # Be polite to the API: one second between calls.
    time.sleep(1)
    print >>sys.stderr, "."

    # Request public data of person in English.
    reqUrl = "https://people.aalto.fi/api/api.php?LANG=eng&APIID="+APIID+"&id="+i
    req = urllib2.Request(reqUrl)
    opener = urllib2.build_opener()
    response = opener.open(req)

    # Creates python-object from json (a one-element list per person).
    personData = json.load(response)

    # Create basic information about the person.
    person = URIRef(PPL["person"+i])
    graph.add((person,RDF.type,FOAF["Person"]))
    graph.add((person,RDF.type,DCT["Agent"]))
    graph.add((person,RDF.type,FOAF["Agent"]))
    graph.add((person,FOAF["firstName"],Literal(p.unescape(personData[0]["firstname"]))))
    graph.add((person,FOAF["familyName"],Literal(p.unescape(personData[0]["lastname"]))))
    # BUG FIX: parentheses were mis-nested so that the last name was
    # unescaped twice and the first name not at all; unescape each part
    # separately before joining.
    graph.add((person,FOAF["name"],Literal(p.unescape(personData[0]["firstname"]) + " " + p.unescape(personData[0]["lastname"]))))
    graph.add((person,FOAF["workInfoHomepage"],URIRef(APL[personData[0]["backlink"]])))
    graph.add((person,ORG["memberOf"],FAC["AaltoUniversity"]))

    # Description of the person (optional fields, only added when present).
    if "description" in personData[0]:
        graph.add((person,FOAF["plan"],Literal(p.unescape(personData[0]["description"]), lang="en")))
    if "worktitle" in personData[0]:
        graph.add((person,SCHEMA["jobtitle"],Literal(p.unescape(personData[0]["worktitle"]), lang="en")))
    if "imageavailable" in personData[0] and personData[0]["imageavailable"] == "1":
        graph.add((person,FOAF["img"],URIRef(APF[i+"_y_64.jpg"])))

    # Person's other accounts.
    if "jabberid" in personData[0]:
        graph.add((person,FOAF["jabberID"],Literal(p.unescape(personData[0]["jabberid"]))))
    if "skypeid" in personData[0]:
        graph.add((person,FOAF["skypeID"],Literal(p.unescape(personData[0]["skypeid"]))))
    # ... continue

    department = None
    school = None

    # Person's work school: create the school node and link membership.
    if "workschool" in personData[0]:
        school = camelCase(p.unescape(personData[0]["workschool"]))
        graph.add((FAC[school],RDF.type,AIISO["School"]))
        graph.add((FAC[school],FOAF["member"],person))
        graph.add((FAC[school],RDFS.label,Literal(p.unescape(personData[0]["workschool"]), lang="en")))
        graph.add((FAC[school],AIISO["part_of"],FAC["AaltoUniversity"]))

    # Person's work department; attached under the school when one is known.
    if "workdepartment" in personData[0]:
        department = camelCase(p.unescape(personData[0]["workdepartment"]))
        if school is not None:
            graph.add((FAC[department],AIISO["part_of"],FAC[school]))
        graph.add((FAC[department],RDF.type,AIISO["Department"]))
        graph.add((FAC[department],FOAF["member"],person))
        graph.add((FAC[department],RDFS.label,Literal(p.unescape(personData[0]["workdepartment"]), lang="en")))

    # Keywords defined by the person, modeled as SKOS concepts.
    if "keywords" in personData[0]:
        keywords = personData[0]["keywords"]

        for key in keywords:
            if key["title"] != None:
                keyword = URIRef(PPL["k"+key["id"]])
                graph.add((keyword,RDF.type,SKOS["Concept"]))
                graph.add((keyword,RDFS.label,Literal(p.unescape(key["title"]))))
                graph.add((person,FOAF["topic_interest"],keyword))

    # Publications confirmed by the person.
    if "publications" in personData[0]:
        publications = personData[0]["publications"]

        for publication in publications:
            document = URIRef(PPL["pub"+publication["id"]])
            graph.add((document,RDF.type,FOAF["Document"]))
            graph.add((document,RDF.type,BIBO["Document"]))
            graph.add((document,BIBO["producer"],person))
            graph.add((document,DCT["title"],Literal(p.unescape(publication["title"]))))
            graph.add((document,DCT["date"],Literal(publication["year"],datatype=XSD.gYear)))

            # Only link a homepage when the article URL passes validation;
            # removeInvalidChars strips characters illegal in URIRefs.
            if publication["url"] != "" and is_valid_url(publication["url"]):
                try:
                    graph.add((document,FOAF["homepage"],URIRef(removeInvalidChars(publication["url"]))))
                except Exception:
                    # Best-effort: a malformed URL must not abort the spider.
                    pass
            if publication["isbn"] != "":
                graph.add((document,BIBO["isbn"],Literal(publication["isbn"])))
            if publication["issn"] != "":
                graph.add((document,PRISM["issn"],Literal(publication["issn"])))
            if publication["magazine"] != "":
                # Magazines/publishers have no stable id, so blank nodes are used.
                magazine = BNode()
                graph.add((document,DCT["isPartOf"],magazine))
                graph.add((magazine,FOAF["name"],Literal(p.unescape(publication["magazine"]))))
                graph.add((magazine,RDF.type,BIBO["Magazine"]))
            if publication["publisher"] != "":
                publisher = BNode()
                graph.add((document,DCT["publisher"],publisher))
                graph.add((publisher,FOAF["name"],Literal(p.unescape(publication["publisher"]))))
                graph.add((publisher,RDF.type,FOAF["Agent"]))
                # continue ...

    # Second call per person: the same record in Finnish, to add fi labels.
    reqUrl = "https://people.aalto.fi/api/api.php?LANG=fin&APIID="+APIID+"&id="+i
    req = urllib2.Request(reqUrl)
    opener = urllib2.build_opener()
    response = opener.open(req)
    personData = json.load(response)

    if "description" in personData[0]:
        graph.add((person,FOAF["plan"],Literal(p.unescape(personData[0]["description"]), lang="fi")))
    if "worktitle" in personData[0]:
        graph.add((person,SCHEMA["jobtitle"],Literal(p.unescape(personData[0]["worktitle"]), lang="fi")))
    if "workschool" in personData[0] and school is not None:
        graph.add((FAC[school],RDFS.label,Literal(p.unescape(personData[0]["workschool"]), lang="fi")))
    if "workdepartment" in personData[0] and department is not None:
        graph.add((FAC[department],RDFS.label,Literal(p.unescape(personData[0]["workdepartment"]), lang="fi")))

# Emit the whole graph as N3 on stdout.
graph.serialize(destination=sys.stdout, format='n3')
