#!/usr/bin/env python

# (c) 2007-2011 Helsinki University of Technology and University of Helsinki
# Licensed under the MIT license; see LICENSE.txt for more information.

from harava.map import Mapper
from harava.rdf import *
from harava.lrucache import LRUCache

import sys
import urllib

# Base URL of the ONKI TERO ontology service queried for equivalent concepts.
ONKI_TERO = "http://www.yso.fi/onki/tero/"
#ONKI_TERO = "http://localhost:8888/onki-tero/"
#ONKI_TERO = "http://seco.hut.fi/onkiserver/onki-tero/"

# Maximum number of times a failed ONKI query is retried before giving up.
MAX_ATTEMPT = 5

# Capacity of the per-mapper URI -> equivalents LRU cache.
CACHE_SIZE = 1000

# Keyword lookup schemes, tried in order, keyed by document language code.
# The None entry is the fallback used for unknown/missing languages.
KWLOOKUPSCHEMES = {
  'fi': ('ts.tero', 'ts.finmesh'),
  'sv': ('ts.tero-sv', 'ts.swemesh'),
  'en': ('ts.tero-en', 'dcterms.mesh'),
  None: ('ts.tero',),
}


class TSPublicationMapper (Mapper):
  def __init__(self, session):
    Mapper.__init__(self, session)
    self._session = session
    self._cache = LRUCache(CACHE_SIZE)
    self._cacheHits = 0
    self._cacheMisses = 0
    

  def get_equivs(self, uri, attempt=1):
    if uri in self._cache:
      self._cacheHits += 1
      return self._cache[uri]
    self._cacheMisses += 1
  
    url = "%s?rdf=%d&c=%s" % (ONKI_TERO, attempt, urllib.quote(uri, ''))
    fh = urllib.urlopen(url)
    graph = getGraph()
    try:
      graph.parse(fh)
    except:
      print >>sys.stderr, "Parse error for url", url, "attempt:", attempt
      if attempt >= MAX_ATTEMPT:
        print >>sys.stderr, "Giving up"
        return []
      return self.get_equivs(uri, attempt+1)
    fh.close()
    ret = list(graph.subjects(SKOSMAP.exactMatch, uri))
    self._cache[uri] = ret
    return ret

  def process(self, doc):
    lookup = self._session.getLookupService()

    # document language is used to qualify some literals
    docLanguage = doc.getMetadata(DC.language)
    if docLanguage:
      docLanguage = str(list(docLanguage)[0])
    else:
      docLanguage = None

    # create/resolve encoded FOAF instances
    for prop in (DC.publisher, DC.creator):
      for value in list(doc.getMetadata(prop)):
        if isinstance(value, Literal):
          if value.datatype == TS.FOAFPerson:
            doc.clearMetadata(prop, value)
            doc.setMetadataInstance(DC.creator, FOAF.Person, FOAF.name, Literal(unicode(value)))
          elif value.datatype in (TS.FOAFOrganization, TS.FOAFAgent, TS.FOAFGroup):

            if value.datatype == TS.FOAFOrganization:
              t = FOAF.Organization
            elif value.datatype == TS.FOAFAgent:
              t = FOAF.Agent
            elif value.datatype == TS.FOAFGroup:
              t = FOAF.Group

            doc.clearMetadata(prop, value)
            agents = lookup.lookup(doc, 'ts.agent', unicode(value), warn=False)
            if agents:
              for agent in agents:
                doc.setMetadata(prop, agent)
            else:
              doc.setMetadataInstance(prop, t, FOAF.name, Literal(unicode(value)))
  
  
    # resolve encoding scheme qualified literals to URIs using the lookup service
    for prop in (TS.genre, DC.type, DCTERMS.audience, DC.subject):
      for value in list(doc.getMetadata(prop)):
        if isinstance(value, Literal) and value.datatype is not None:
          uris = lookup.lookup(doc, unicode(value.datatype), unicode(value))
          if uris:
            doc.clearMetadata(prop, value)
            for uri in uris:
              doc.setMetadata(prop, uri)

    # move any remaining DC.subject literals to TS.keyword field
    for subj in list(doc.getMetadata(DC.subject)):
      if isinstance(subj, Literal):
        doc.clearMetadata(DC.subject, subj)
        language = subj.language
        if not language and subj.datatype:
          language = lookup.getSchemeLanguage(unicode(subj.datatype))[0]
        doc.setMetadata(TS.keyword, Literal(unicode(subj), language))
    
    # Process the TS.keyword values, trying to find matches in TERO
    # If matches are found, move the matched value to DC.subject
    if docLanguage in KWLOOKUPSCHEMES:
      schemes = KWLOOKUPSCHEMES[docLanguage]
    else: # unknown/nonexistent docLanguage, use default
      schemes = KWLOOKUPSCHEMES[None]
    
    for kw in list(doc.getMetadata(TS.keyword)):
      for scheme in schemes:
        uris = lookup.lookup(doc, scheme, unicode(kw), language=kw.language, warn=False)
        if uris:
          doc.log('NOTE', 'map', 'Keyword lookup successful using scheme %s' % scheme, kw)
          doc.clearMetadata(TS.keyword, kw)
          for uri in uris:
            doc.setMetadata(DC.subject, uri)
          break
  
    # project document subjects to corresponding TERO subjects
    newsubjs = []
    for subj in doc.getMetadata(DC.subject):
      if subj.startswith(TERO):
        continue # no need to project TERO subjects
#      print >>sys.stderr, "finding linked subjects for", subj
#      subjs = self._subjectLinker.getLinkedResources(subj)
      subjs = self.get_equivs(subj)
      if len(subjs) == 0:
        doc.log('WARN', 'map', 'No TERO subjects found for subject', subj)
      elif len(subjs) > 1:
        doc.log('WARN', 'map', 'Ambiguous subject', subj)
      for conc in subjs:
        newsubjs.append(conc)

#        print >>sys.stderr, "  found:", conc

    for subj in newsubjs:
      doc.setMetadata(DC.subject, subj)
    
    
    # remove non-TERO subjects
    clearlist = []
    for subj in doc.getMetadata(DC.subject):
      if not subj.startswith(TERO):
        clearlist.append(subj)
    for subj in clearlist:
      doc.clearMetadata(DC.subject, subj)
    

    # remove old style audiences
    clearlist = []
    for aud in doc.getMetadata(DCTERMS.audience):
      if not aud.startswith(SECTION):
        clearlist.append(aud)
    for aud in clearlist:
#      print "dropping old style audience", aud
      doc.clearMetadata(DCTERMS.audience, aud)

    doc.setCompleted('map')
    self._session.schedule(doc)
    return True

  def finish(self):
    if self._cacheHits + self._cacheMisses > 0:
      hitrate = 100.0 * self._cacheHits / (self._cacheHits + self._cacheMisses)
    else:
      hitrate = -1
    if self._session.getVerbose():
      print "Mapper (%s): cache size %d, hit rate %.1f percent" % (self._session.getSource().getName(), 
                                                                   len(self._cache), hitrate)

    # possibly free some memory
    del self._cache
    
