#!/usr/bin/python
#
# Copyright (c) 2009 Julius Volz
# See LICENSE for details.

from xml.etree import ElementTree
import conv, dataset, debug, function, linkspec, metric, rdfpath, silk, complexmatch

# Module state: whether a configuration is currently loaded, the path it was
# loaded from (None when loaded from a string), and its raw XML text.
loaded = False
config_file = None
config_lines = ''

# Working directories for the query cache and the prematching index.
cache_dir = 'cache'
index_dir = 'index'

# Debug/log verbosity level (higher means more output).
verbosity = 0

# Fallback values applied to a DataSource when the corresponding tag is
# absent from its definition (see parse_datasource).
default_pause = 0
default_page_size = 1000
default_do_cache = True
default_retry_count = 3
default_retry_time = 5000

# Registries filled while parsing the configuration, each keyed by ID:
# namespace prefixes, DataSource objects, LinkSpec objects and complex
# matching queries.
prefixes = {}
datasources = {}
linkspecs = {}
complexquery = {}

# XML tag names recognized at the top level of a configuration file.
PREFIX_TAG =          'Prefix'
DATASOURCE_TAG =      'DataSource'
ENDPOINT_TAG =        'EndpointURI'
GRAPH_TAG =           'Graph'
DO_CACHE_TAG =        'DoCache'
PAUSE_TAG =           'Pause'
PAGE_SIZE_TAG =       'PageSize'
RETRY_COUNT_TAG =     'RetryCount'
RETRY_PAUSE_TAG =     'RetryPause'
METRIC_TAG =          'Metric'
PARAM_TAG =           'Param'
LINKSPEC_TAG =        'Interlink'
COMPLEX_TAG =	      'ComplexMatching'

# XML tag names used inside an <Interlink> link specification.
LINK_TYPE_TAG =       'LinkType'
SRC_DATASET_TAG =     'SourceDataset'
DST_DATASET_TAG =     'TargetDataset'
RESTRICT_TAG =        'RestrictTo'
PREMATCHING_TAG =     'PreMatchingDefinition'
DST_INDEX_TAG =       'Index'
MATCHING_TAG =        'LinkCondition'
MATCH_TAG =           'Compare'
FUNC_TAG =            'Transform'
THRESHOLDS_TAG =      'Thresholds'
LINK_LIMIT_TAG =      'Limit'
OUTPUT_TAG =          'Output'
SYNC_TAG =            'SyncSettings'
SRC_ENDPOINT_TAG =    'SourceEndpoint'
DST_ENDPOINT_TAG =    'TargetEndpoint'

# XML attribute names used on the elements above.
ID_ATTR =             'id'
METRIC_ID_ATTR =      'metric'
FUNC_ID_ATTR =        'function'
NAMESPACE_ATTR =      'namespace'
INDEX_LIMIT_ATTR =    'hitLimit'
SRC_PATH_ATTR =       'sourcePath'
DST_PATH_ATTR =       'targetPath'
WEIGHT_ATTR =         'weight'
OPTIONAL_ATTR =       'optional'
DEFAULT_VALUE_ATTR =  'default'
PARAM_NAME_ATTR =     'name'
VALUE_PARAM_ATTR =    'value'
PATH_PARAM_ATTR =     'path'
DATASOURCE_ID_ATTR =  'dataSource'
DATASET_VAR_ATTR =    'var'
MAX_LINKS_ATTR =      'max'
FILTER_METHOD_ATTR =  'method'
ACCEPT_FILE_ATTR =    'acceptedLinks'
VERIFY_FILE_ATTR =    'verifyLinks'
OUTPUT_FORMAT_ATTR =  'format'
OUTPUT_MODE_ATTR =    'mode'
OUTPUT_NS_ATTR =      'namespace'


class Error(silk.Error):
  """Used to indicate errors during configuration loading and parsing."""


def load(configuration, crawl_uri=None, is_file=True):
  """Load a configuration and instantiate configuration objects.

  Any previously loaded configuration is discarded first.

  Args:
    configuration: path of the configuration file (when is_file is True),
      or the raw XML configuration text itself.
    crawl_uri: optional URI passed through to every link specification's
      source dataset (see parse_linkspec).
    is_file: whether 'configuration' names a file or is the XML itself.

  Returns:
    None

  Raises:
    Error: if the configuration cannot be read or parsed, or if it
      contains no link specifications.
  """
  global loaded
  global config_file
  global config_lines

  # unload any prior configuration, even if it was only loaded partially
  unload()

  try:
    if is_file:
      config_file = configuration
      # open() instead of the legacy file() builtin (removed in Python 3);
      # close the handle even if reading fails
      f = open(configuration)
      try:
        config_lines = f.read()
      finally:
        f.close()
    else:
      config_lines = configuration
    config = ElementTree.fromstring(config_lines)
  except Exception:
    # narrowed from a bare except so KeyboardInterrupt/SystemExit propagate
    raise Error('Failed to parse configuration file, is it valid XML?')

  for prefix in config.findall(PREFIX_TAG):
    parse_prefix(prefix)

  for ds in config.findall(DATASOURCE_TAG):
    parse_datasource(ds)

  for mt in config.findall(METRIC_TAG):
    parse_metric(mt)

  if not config.findall(LINKSPEC_TAG):
    raise Error('Could not find any link specifications!')

  for ls in config.findall(LINKSPEC_TAG):
    parse_linkspec(ls, crawl_uri)

  loaded = True

def save():
  """Write back the current configuration to the file it was last loaded from.

  If it was not loaded from a file (or not loaded at all), this raises an
  Error.

  Raises:
    Error: if no configuration was loaded from a file.
  """
  if not loaded or config_file is None:
    raise Error('No configuration loaded from file, cannot write back to disk!')

  debug.log(1, 'Writing back configuration to file "%s"...' % config_file)
  # open() instead of the legacy file() builtin; 'with' guarantees the
  # handle is closed even if the write fails
  with open(config_file, 'w') as f:
    f.write(config_lines)

def unload():
  """Discard all loaded configuration state and registered user metrics."""
  global loaded
  # empty every parse registry in place (other modules may hold references)
  for registry in (prefixes, datasources, linkspecs, complexquery):
    registry.clear()
  metric.unregister_user_metrics()
  loaded = False

def parse_uri(uri):
  """Expand a URI reference into a full URI string.

  Args:
    uri: either a full URI wrapped in angle brackets ('<http://...>') or a
      prefixed name ('prefix:localname') whose prefix was declared by an
      earlier Prefix definition.

  Returns:
    The expanded URI string.

  Raises:
    Error: if the prefixed form is malformed or uses an unknown prefix.
  """
  # startswith avoids an IndexError on an empty string
  if uri.startswith('<'):
    return uri[1:-1]
  try:
    # split only on the first colon so local names may themselves
    # contain colons
    prefix, element = uri.split(':', 1)
    return prefixes[prefix] + element
  except (ValueError, KeyError):
    # ValueError: no colon at all; KeyError: undeclared prefix
    raise Error('Error parsing prefixed URI "%s"' % uri)

def parse_prefix(prefix):
  """Record a namespace prefix definition in the module 'prefixes' dict.

  Args:
    prefix: ElementTree.Element containing the prefix definition.

  Returns:
    None
  """
  name = prefix.get(ID_ATTR)
  namespace = prefix.get(NAMESPACE_ATTR)
  prefixes[name] = namespace


def _find_int(ds, tag, default):
  """Return the integer text of child 'tag' of ds, or 'default' if absent."""
  node = ds.find(tag)
  return default if node is None else int(node.text)


def parse_datasource(ds):
  """Parse a datasource definition, save DataSource object in 'datasources'.

  Args:
    ds: ElementTree.Element containing the datasource definition.

  Returns:
    None

  Raises:
    Error: if the definition lacks an ID or an endpoint URI.
  """
  ds_id = ds.get(ID_ATTR)
  if ds_id is None:
    raise Error('Missing ID in datasource definition')

  endpoint = ds.find(ENDPOINT_TAG)
  if endpoint is None:
    raise Error('Missing endpoint URI in datasource definition')

  # the graph is optional; None means the endpoint's default graph
  graph = ds.find(GRAPH_TAG)
  graph_name = None if graph is None else graph.text

  # caching is enabled in the file via the literal text '1'
  cache_node = ds.find(DO_CACHE_TAG)
  do_cache = default_do_cache if cache_node is None else cache_node.text == '1'

  # numeric options fall back to the module-level defaults when absent
  pause = _find_int(ds, PAUSE_TAG, default_pause)
  page_size = _find_int(ds, PAGE_SIZE_TAG, default_page_size)
  retry_count = _find_int(ds, RETRY_COUNT_TAG, default_retry_count)
  retry_pause = _find_int(ds, RETRY_PAUSE_TAG, default_retry_time)

  datasources[ds_id] = dataset.DataSource(ds_id,
                                          endpoint.text,
                                          graph_name,
                                          do_cache,
                                          page_size,
                                          pause,
                                          retry_count,
                                          retry_pause)


def parse_metric(mt):
  """Parse a metric definition, register UserMetric as metric.

  Args:
    mt: ElementTree.Element containing the metric definition.

  Returns:
    None
  """
  # every declared parameter is passed through unconverted
  params = dict((param.get(PARAM_NAME_ATTR), [conv.raw])
                for param in mt.findall(PARAM_TAG))

  # the aggregation of submetrics forms the body of the user metric
  aggregation = get_main_aggregation(mt)

  metric.register(metric.UserMetric(mt.get(ID_ATTR), params, aggregation))


def parse_linkspec(ls, crawl_uri):
  """Parse link specification, save LinkSpec object in 'linkspecs' dict.

  Args:
    ls: ElementTree.Element containing the link specification.
    crawl_uri: optional URI handed to the source dataset so it is built as
      a crawler dataset instead of an endpoint-backed one.

  Returns:
    None

  Raises:
    Error: if a mandatory section of the specification is missing.
  """
  ls_id = ls.get(ID_ATTR)
  link_type = ls.find(LINK_TYPE_TAG)
  src_dataset = ls.find(SRC_DATASET_TAG)
  dst_dataset = ls.find(DST_DATASET_TAG)
  prematching = ls.find(PREMATCHING_TAG)
  matching = ls.find(MATCHING_TAG)
  thresh = ls.find(THRESHOLDS_TAG)
  link_limit = ls.find(LINK_LIMIT_TAG)  # renamed: 'filter' shadowed a builtin

  # complex matching sections register themselves in 'complexquery'
  for cm in ls.findall(COMPLEX_TAG):
    parse_complex(cm)

  out = ls.find(OUTPUT_TAG)
  sync = ls.find(SYNC_TAG)

  # validate the mandatory sections before building anything
  if ls_id is None:
    raise Error('Missing ID in linkspec definition')
  if link_type is None:
    raise Error('Missing link type in linkspec "%s"' % ls_id)
  if src_dataset is None:
    raise Error('Missing source dataset in linkspec "%s"' % ls_id)
  if dst_dataset is None:
    raise Error('Missing destination dataset in linkspec "%s"' % ls_id)
  if matching is None:
    raise Error('Missing matching section in linkspec "%s"' % ls_id)
  if thresh is None:
    raise Error('Missing threshold in linkspec "%s"' % ls_id)
  if out is None:
    raise Error('Missing output in linkspec "%s"' % ls_id)

  # only the source dataset may be crawler-backed
  src_ds = parse_dataset(src_dataset, crawl_uri)
  dst_ds = parse_dataset(dst_dataset)

  restrict = None
  if prematching is not None:
    restrict = parse_prematching(prematching)

  aggregation = get_main_aggregation(matching)

  # TODO: do proper checking for these sections
  filter_def = None
  if link_limit is not None:
    filter_def = {
      'limit': int(link_limit.get(MAX_LINKS_ATTR, 1)),
      'method': link_limit.get(FILTER_METHOD_ATTR, 'metric_value'),
    }

  thresholds = {
    'accept': float(thresh.get('accept')),
    'verify': float(thresh.get('verify'))
  }

  output = {
    'accept': out.get(ACCEPT_FILE_ATTR, 'accept_links.n3'),
    'verify': out.get(VERIFY_FILE_ATTR, 'verify_links.n3'),
    'format': out.get(OUTPUT_FORMAT_ATTR, 'n3'),
    # conditional expression instead of the fragile 'and/or' idiom
    'mode': 'ab' if out.get(OUTPUT_MODE_ATTR, 'append') == 'append' else 'wb',
    'namespace': out.get(OUTPUT_NS_ATTR, 'http://example.com/link/')
  }

  sync_settings = None
  if sync is not None:
    sync_settings = {
      'src_endpoint': sync.find(SRC_ENDPOINT_TAG).get('uri'),
      'dst_endpoint': sync.find(DST_ENDPOINT_TAG).get('uri')
    }

  link_spec = linkspec.LinkSpec(ls_id, parse_uri(link_type.text), src_ds, dst_ds,
                                restrict, aggregation, filter_def, thresholds,
                                output, sync_settings)

  linkspecs[ls_id] = link_spec

def parse_complex(cm):
  """Parse a ComplexMatching section, save Complex objects in 'complexquery'.

  Args:
    cm: ElementTree.Element containing the complex matching definition.

  Returns:
    None
  """
  queries = cm.findall('Query')
  if not queries:
    return

  # these attributes live on the enclosing element and are shared by
  # every query below it
  accept = cm.get('accept')
  least_similar = cm.get('least_similar')

  for query in queries:
    query_id = query.get(ID_ATTR)  # renamed: 'id' shadowed a builtin
    replace = query.get('replace')
    met = metric.get(cm.get('metric'))

    complexquery[query_id] = complexmatch.Complex(query_id, query.text,
                                                  replace, met, accept,
                                                  least_similar)

def parse_prematching(pm):
  """Parse a prematching section from a LinkSpec.

  Args:
    pm: ElementTree.Element containing prematching definition.

  Returns:
    dict of prematching options.
  """
  source_path = rdfpath.RDFPath(pm.get(SRC_PATH_ATTR))
  hit_limit = int(pm.get(INDEX_LIMIT_ATTR))

  # one RDF path per <Index> child on the target side
  index_paths = []
  for index in pm.findall(DST_INDEX_TAG):
    index_paths.append(rdfpath.RDFPath(index.get(DST_PATH_ATTR)))

  return {
    'src': source_path,
    'limit': hit_limit,
    'dst_indexes': index_paths,
  }


def parse_metric_options(mt):
  """Parse options from a metric aggregation or metric call.

  Args:
    mt: ElementTree.Element containing metric aggregation or call.

  Returns:
    dict of options
  """
  default = mt.get(DEFAULT_VALUE_ATTR, None)
  if default is not None:
    default = float(default)

  return {
    # bool() replaces the old 'x and True or False' idiom (same truthiness:
    # any non-empty attribute value marks the metric as optional)
    'optional': bool(mt.get(OPTIONAL_ATTR, None)),
    'weight': float(mt.get(WEIGHT_ATTR, 1)),
    'default': default
  }


def get_main_aggregation(el):
  """Parse the first metric aggregation found, return MetricAggregation object.

  Args:
    el: ElementTree.Element containing as a child the metric aggregation.

  Raises:
    Error: if no aggregation child element is present.
  """
  # return as soon as any recognized aggregation tag is found
  for aggr_name in metric.MetricAggregation.AGGREGATION_TYPES:
    candidate = el.find(aggr_name)
    if candidate is not None:
      return parse_aggregation(candidate)

  raise Error('Matching section should contain one of "%s"' %
              ', '.join(metric.MetricAggregation.AGGREGATION_TYPES))
  

def parse_aggregation(aggr):
  """Parse metric aggregation, return MetricAggregation object.

  Args:
    aggr: ElementTree.Element containing the aggregation definition.

  Returns:
    metric.MetricAggregation
  """
  members = []

  # nested aggregations are parsed recursively, paired with their options
  for kind in metric.MetricAggregation.AGGREGATION_TYPES:
    for child in aggr.findall(kind):
      opts = parse_metric_options(child)
      members.append((parse_aggregation(child), opts))

  # leaf comparisons become metric calls
  for comparison in aggr.findall(MATCH_TAG):
    opts = parse_metric_options(comparison)
    members.append((parse_function_call(comparison, is_metric=True), opts))

  return metric.MetricAggregation(aggr.tag, members)


def parse_function_call(func, is_metric=False):
  """Parse function call, return function.FunctionCall object.

  Args:
    func: ElementTree.Element containing the function call.
    is_metric: when True, treat the element as a metric call instead.

  Returns:
    func.FunctionCall (metric.MetricCall when is_metric is True)

  Raises:
    Error: on an unnamed parameter or a missing function/metric ID.
  """
  passed = {}
  for param in func.findall(PARAM_TAG):
    name = param.get(PARAM_NAME_ATTR)
    if name is None:
      raise Error('Passing parameter to function "%s" without a name' %
                  func.get(ID_ATTR))
    passed[name] = parse_param_pass(param)

  if is_metric:
    metric_id = func.get(METRIC_ID_ATTR)
    if metric_id is None:
      raise Error('Missing metric ID in metric call')
    return metric.MetricCall(metric.get(metric_id), passed)

  func_id = func.get(FUNC_ID_ATTR)
  if func_id is None:
    raise Error('Missing function ID in function call')
  return function.FunctionCall(function.get(func_id), passed)


def parse_param_pass(param):
  """Parse passed parameter, return function.Param object.

  Args:
    param: ElementTree.Element containing the passed parameter.

  Returns:
    function.Param subclass object

  Raises:
    Error: if the parameter carries neither a value, a path, nor a
      nested transform element.
  """
  if VALUE_PARAM_ATTR in param.attrib:
    # literal value passed directly
    return function.LiteralParam(param.get(VALUE_PARAM_ATTR))
  elif PATH_PARAM_ATTR in param.attrib:
    # value taken from an RDF path at runtime
    return function.RDFPathParam(param.get(PATH_PARAM_ATTR))
  else:
    # value produced by a nested function (transform) call
    func_param = param.find(FUNC_TAG)
    if func_param is None:
      raise Error('Missing value for parameter "%s"' %
                  param.get(PARAM_NAME_ATTR))

    # reuse the element already found instead of a second find() call
    func_call = parse_function_call(func_param)
    return function.FunctionParam(func_call)


def parse_dataset(ds, crawl_uri=None):
  """Parse dataset definition, return dataset.Dataset object.

  Args:
    ds: ElementTree.Element containing the dataset definition.
    crawl_uri: when given, build a crawler-backed dataset around this URI
      instead of looking up a configured datasource.

  Returns:
    dataset.Dataset (dataset.CrawlerDataset when crawl_uri is given)

  Raises:
    Error: on a missing variable name or an unknown datasource reference.
  """
  ds_var = ds.get(DATASET_VAR_ATTR)
  if ds_var is None:
    raise Error('Missing variable name in dataset definition')

  if crawl_uri is not None:
    return dataset.CrawlerDataset(crawl_uri, ds_var)

  ds_id = ds.get(DATASOURCE_ID_ATTR)
  if ds_id not in datasources:
    raise Error('DataSource "%s" not defined!' % ds_id)

  # an optional RestrictTo child narrows the entity selection
  restriction = None
  restrict_node = ds.find(RESTRICT_TAG)
  if restrict_node is not None:
    restriction = restrict_node.text

  return dataset.Dataset(datasources[ds_id], ds_var, restriction)
