__author__ = 'chimezieogbuji'

import os, string, random, pyparsing, time, re, amara
from cStringIO            import StringIO
from webob                import Request
from akamu.config.dataset import ConnectToDataset, GetGraphStoreForProtocol
from akamu.config.dataset import ConfigureTriclops, GetExternalGraphStoreURL
from akamu.diglot         import layercake_mimetypes, XML_MT
from akamu.diglot         import layercake_parse_mimetypes
from amara.lib            import iri
from amara.xslt           import transform
from akara                import request
from wsgiref.util         import shift_path_info, request_uri
from rdflib.Graph         import Graph,ConjunctiveGraph
from rdflib.sparql        import parser as sparql_parser
from rdflib               import OWL, RDF, RDFS, URIRef, BNode, Namespace
from rdflib               import store, plugin, Literal
from rdflib.store         import Store
from akamu.util           import enum
from amara.lib.iri        import uri_to_os_path

# Enumeration of the SPARQL result serializations this endpoint can emit
RESULT_FORMAT = enum(CSV='CSV',TSV='TSV',JSON='JSON',XML='XML')
# RDF media types acceptable for the SPARQL service description document
SD_FORMATS = ['application/rdf+xml','text/turtle','text/n3','text/plain']
# Maps RDF media types to the corresponding rdflib serializer plugin names
MIME_SERIALIZATIONS = {
    'application/rdf+xml' : 'pretty-xml',
    'text/turtle'         : 'turtle',
    'text/n3'             : 'n3',
    'text/plain'          : 'ntriples'
}

# Namespace used to mint a stable IRI for each dataset's default graph
DefaultGraph_NS = Namespace('tag:metacognition.info,2012:DefaultGraphs#')

def random_filename(chars=string.hexdigits, length=16, prefix='',
                    suffix='', verify=True, attempts=10):
    """
    Return a random filename of `length` characters drawn from `chars`,
    wrapped in `prefix`/`suffix`.

    When `verify` is true, up to `attempts` candidates are tried until one
    names no existing path; RuntimeError is raised if every candidate
    collides (previously the function silently returned None in that case).

    From - http://ltslashgt.com/2007/07/23/random-filenames/
    """
    for _ in range(attempts):
        filename = prefix + ''.join(
            random.choice(chars) for _ in range(length)) + suffix
        if not verify or not os.path.exists(filename):
            return filename
    raise RuntimeError(
        'Unable to generate a fresh random filename after %d attempts'
        % attempts)

class NoEmptyGraphSupport(Exception):
    """Raised when a request targets an empty graph, which this
    implementation cannot represent."""
    def __init__(self):
        msg = "Implementation does not support empty graphs"
        super(NoEmptyGraphSupport, self).__init__(msg)

def RequestedGraphContent(req,store,datasetName):
    """
    Resolve the graph targeted by a Graph Store Protocol request:
    the dataset's default graph when a 'default' parameter is present,
    otherwise the graph named by the 'graph' parameter (falling back to
    the request URL itself as the graph IRI).
    """
    if 'default' in req.params:
        return Graph(identifier=DefaultGraph_NS[datasetName], store=store)
    requested = req.params.get('graph')
    return Graph(identifier=URIRef(requested or req.url), store=store)

def HandleGET(req,environ,start_response,store,datasetName):
    """
    Serve the target graph serialized in an RDF media type negotiated
    from the Accept header, defaulting to RDF/XML.

    Raises NoEmptyGraphSupport when the requested named graph is empty
    (this implementation treats an empty graph as nonexistent).
    """
    graph = RequestedGraphContent(req,store,datasetName)
    if not graph and 'default' not in req.params:
        raise NoEmptyGraphSupport()

    # Content negotiation: compute the best match once; fall back to
    # RDF/XML when no Accept header was sent or nothing matches.
    # (The original called best_match twice for the same answer.)
    requestedMT = (req.accept.best_match(list(layercake_mimetypes))
                   if 'HTTP_ACCEPT' in environ else None)
    if requestedMT:
        format = layercake_mimetypes[requestedMT]
    else:
        requestedMT = 'application/rdf+xml'
        format      = 'pretty-xml'
    content = graph.serialize(format=format)
    start_response("200 Ok",
        [("Content-Type"  , requestedMT),
         # WSGI (PEP 3333) requires header values to be strings, not ints
         ("Content-Length", str(len(content)))]
    )
    return content

def HandlePUT(req,start_response,store,datasetName):
    """
    Replace the content of the target graph with the RDF payload
    (Graph Store Protocol PUT semantics): the graph is cleared and the
    parsed payload triples copied in, answering 201 if the graph did not
    previously exist and 204 otherwise.
    """
    graph       = RequestedGraphContent(req,store,datasetName)
    # Record existence *before* we touch the graph, for the 201-vs-204 choice
    nonexistent = not graph and 'default' not in req.params
    if not req.content_type:
        rt = "Didn't provide an RDF Content-type header"
        start_response("400 Bad Request",
            [("Content-Length", str(len(rt)))])
        return rt
    # Map the payload media type to a parser format name.  The original
    # expression -- parse_map.get(ct, full_map[ct]) -- evaluated the .get()
    # default eagerly, raising KeyError for any type present only in the
    # parse map; check each map in turn instead.
    format = (layercake_parse_mimetypes.get(req.content_type) or
              layercake_mimetypes.get(req.content_type))
    if format is None:
        rt = "Unsupported RDF Content-type: %s" % req.content_type
        start_response("415 Unsupported Media Type",
            [("Content-Length", str(len(rt)))])
        return rt
    try:
        payloadGraph = Graph().parse(StringIO(req.body), format=format)
    except Exception as e:
        # str(e) rather than the deprecated / often-empty e.message
        rt = str(e)
        start_response("400 Bad Request",
            [("Content-Length", str(len(rt)))])
        return rt
    if payloadGraph:
        # Replace semantics: clear the target graph, then copy payload in
        graph.remove((None,None,None))
        for s,p,o in payloadGraph:
            graph.add((s,p,o))
        store.commit()
        if nonexistent:
            start_response("201 Created",[])
        else:
            start_response("204 No Content",[])
        return ""
    else:
        # Empty payload graph: the store cannot represent empty graphs
        start_response("200 Ok",[])
        return "NOOP: server doesn't support empty graphs"

def HandleDELETE(req,start_response,store,datasetName):
    """
    Remove every statement from the target graph.  A graph that is
    already empty is treated as nonexistent and reported via
    NoEmptyGraphSupport.
    """
    graph   = RequestedGraphContent(req,store,datasetName)
    missing = not graph and 'default' not in req.params
    if missing:
        raise NoEmptyGraphSupport()
    graph.remove((None,None,None))
    store.commit()
    start_response("200 Ok",[])
    return ""

def handleTrailingSlash(url,strip=True):
    """
    Normalize the trailing slash of `url`: remove it when `strip` is true,
    otherwise ensure exactly one is present.  Safe on the empty string
    (the original indexed url[-1] and raised IndexError).
    """
    if strip:
        return url[:-1] if url.endswith('/') else url
    return url if url.endswith('/') else url + '/'

def _partitionPayload(payloadGraph, graph):
    """
    Collect the triples of payloadGraph not already in graph as
    addN-style quads.  Returns (toAdd, canMerge); canMerge is False as
    soon as a triple already present involves a blank node, since a
    merge with shared BNodes (http://www.w3.org/TR/rdf-mt/#defmerge)
    is not supported.
    """
    toAdd = []
    for triple in payloadGraph:
        if triple not in graph:
            s, p, o = triple
            toAdd.append((s, p, o, graph))
        elif [term for term in triple if isinstance(term, BNode)]:
            return toAdd, False
    return toAdd, True

def HandlePOST(req,start_response,store,graphStore,externalGS,datasetName):
    """
    Graph Store Protocol POST: when the request IRI names the Graph Store
    itself, mint a fresh graph IRI, load the payload into it, and answer
    201 Created with its Location; otherwise RDF-merge the payload
    (single document or multipart/form-data parts) into the target graph.
    """
    graph = RequestedGraphContent(req,store,datasetName)

    if not req.content_type:
        rt = "Didn't provide an RDF Content-type header"
        start_response("400 Bad Request",
            [("Content-Length", str(len(rt)))])
        return rt
    if handleTrailingSlash(req.url) == handleTrailingSlash(graphStore):
        #If the request IRI identifies the underlying Graph Store, the origin
        #server MUST create a new RDF graph comprised of the statements in
        #the RDF payload and return a designated graph IRI associated with
        #the new graph. The new graph IRI should be specified in the Location
        #HTTP header along with a 201 Created code and be different from the
        #request IRI.
        new_filename = random_filename(suffix=req.params.get('suffix',''))

        new_location = URIRef(iri.absolutize(
            new_filename,
            handleTrailingSlash(graphStore,strip=False)
        ))

        # The Location advertised to clients may differ from the internal
        # graph store base (e.g. behind a proxy)
        external_new_location = iri.absolutize(
            new_filename,
            handleTrailingSlash(
                externalGS if externalGS else graphStore,
                strip=False)
        )

        try:
            Graph(identifier=new_location,store=store).parse(
                StringIO(req.body),
                format   = layercake_parse_mimetypes[req.content_type]
            )
        except Exception as e:
            rt = str(e)
            start_response("400 Bad Request",
                [("Content-Length", str(len(rt)))])
            return rt
        store.commit()
        start_response("201 Created",
            [("Location", external_new_location),
             # WSGI header values must be strings
             ("Content-Length", "0")])
        return ''

    # Merge case: parse the payload(s) and collect the triples to add
    toAdd    = []
    canMerge = True
    try:
        if req.content_type == 'multipart/form-data':
            import cgi
            form = cgi.FieldStorage(
                fp=StringIO(req.body),
                environ=request.environ
            )
            for multipartEntry in form:
                partAdds, canMerge = _partitionPayload(
                    Graph().parse(
                        StringIO(form.getvalue(multipartEntry)),
                        format = layercake_parse_mimetypes[
                                     form[multipartEntry].type]
                    ),
                    graph)
                toAdd.extend(partAdds)
                if not canMerge:
                    break
        else:
            toAdd, canMerge = _partitionPayload(
                Graph().parse(
                    StringIO(req.body),
                    format = layercake_parse_mimetypes[req.content_type]
                ),
                graph)
    except Exception as e:
        rt = str(e)
        start_response("400 Bad Request",
            [("Content-Length", str(len(rt)))])
        return rt
    if not canMerge:
        rt = "Merge involving shared blank nodes not supported"
        start_response("409 Conflict",
            [("Content-Length", str(len(rt)))])
        return rt
    graph.addN(toAdd)
    store.commit()
    start_response("200 Ok",[])
    return ''

class graph_store_protocol(object):
    """
    WSGI decorator implementing the SPARQL 1.1 Graph Store HTTP Protocol
    against the graph store configured for this Akamu instance, dispatching
    each HTTP method to the matching Handle* function.
    """
    def __init__(self):
        # Resolve the configured dataset and its internal/external graph
        # store IRIs once, at decoration time
        self.datasetName,self.gs_url = GetGraphStoreForProtocol()
        self.store                   = ConnectToDataset(self.datasetName)
        self.external_gs_url         = GetExternalGraphStoreURL()

    def __call__(self, func):
        def innerHandler(environ, start_response):
            req = Request(environ)
            try:
                if req.method   in ['HEAD','GET']:
                    rt = HandleGET(req,environ,start_response,self.store,self.datasetName)
                    # HEAD gets identical headers but an empty body
                    return rt if req.method == 'GET' else ''
                elif req.method == 'PUT':
                    return HandlePUT(req,start_response,self.store,self.datasetName)
                elif req.method == 'DELETE':
                    return HandleDELETE(req,start_response,self.store,self.datasetName)
                elif req.method == 'PATCH':
                    rt="PATCH not supported"
                    start_response("405 Method Not Allowed",
                        [("Content-Length", str(len(rt)))]
                    )
                    return rt
                elif req.method == 'POST':
                    return HandlePOST(
                        req,
                        start_response,
                        self.store,
                        self.gs_url,
                        self.external_gs_url,
                        self.datasetName)
                else:
                    start_response("405 Method Not Allowed",[])
                    return "Method not allowed for this resource"
            except NoEmptyGraphSupport:
                # An empty/nonexistent target graph is reported as missing.
                # (The status line previously read "404 Method Not Allowed",
                # a mismatched code/reason pair.)
                rt = "Implementation does not support empty graphs"
                start_response("404 Not Found",
                    [("Content-Length", str(len(rt)))])
                return rt
        return innerHandler

def GetResultFormats(results,xslt_dir,result_format=RESULT_FORMAT.XML):
    """
    Serialize a SPARQL result object to SPARQL/XML, then transform it to
    the requested format with the appropriate stylesheet from xslt_dir.
    """
    query_results = results.serialize(format='xml')

    # Choose the stylesheet and its parameters for the requested format;
    # XML/CSV share the csv/tsv stylesheet with tsv disabled.
    if result_format == RESULT_FORMAT.JSON:
        sheet, params = 'sparqlxml2json.xsl', {u'ignore-bnode': True}
    elif result_format == RESULT_FORMAT.TSV:
        sheet, params = 'xml-to-csv-tsv.xslt', {u'standard': True}
    else:
        sheet, params = 'xml-to-csv-tsv.xslt', {u'standard': True,
                                                u'tsv': False}
    return transform(query_results,
                     os.path.join(xslt_dir, sheet),
                     params=params)

# Row template for the (currently disabled) managed-query listing; the %s
# slots are: manager URL, query digest, query name, last-modified time,
# last-run time, and the result-count cell markup.
QUERY_LIST_ENTRY=\
"""
<tr>
    <td>
        <a href="%s?query=%s&action=edit">%s</a>
    </td>
    <td>%s</td>
    <td>%s</td>
    <td>
        %s
    </td>
</tr>"""

# XHTML template for the SPARQL query form.  The tokens CODEMIRROR,
# ENTAILMENT, ENDPOINT, QUERIES and BINDINGS are substituted via
# str.replace() at request time; the single %s (inside the commented-out
# header) is filled with the endpoint path, so literal percent signs are
# escaped as %%.  (Fixed the broken "</a:q>" closing tag -- a stray vim
# ":q" -- in the Powered-by footer.)
SPARQL_FORM=\
"""
<?xml version="1.0" encoding="utf-8"?>
<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en">
  <head>
    <title>SPARQL Kiosk</title>
    CODEMIRROR
     <script>
function submitQuery(formId) {
    document.getElementById(formId).submit();
}
function submitQueryStop(formId){
    document.getElementById(formId).action="/processes";
    document.getElementById(formId).submit();
}
function getTicket(formId){
    document.getElementById(formId).method="get";
    document.getElementById(formId).action="/ticket";
    document.getElementById(formId).submit();
}
     </script>
  </head>
  <body>
    <div style="margin-right: 10em">
      <!--h2>Triclops: <a href="http://www.w3.org/TR/rdf-sparql-query">SPARQL</a> Kiosk</h2>
      <p>The list of (long-)running queries can be <a href="%s/processes" target="_blank">managed</a>.</p-->
      ENTAILMENT
      <form id="queryform" action="ENDPOINT" method="post">
        <!--hidden ticket-->
        <div>
        </div>
        <!-- Default Grap IRI: <input type="text" size="80" name="default-graph-uri" id="default-graph-uri" value=""/ -->
        <div>

        <textarea id="query" name="query" cols="120" rows="30">
#Example query (all classes in dataset)
SELECT DISTINCT ?Concept where {
    [] a ?Concept
}
        </textarea>
        </div>
        <script>
            var editor = CodeMirror.fromTextArea(document.getElementById("query"), {
                mode: "application/x-sparql-query",
                tabMode: "indent",
                matchBrackets: true
            });
        </script>
        <select name="resultFormat">
          <option value="xml" selected="on">SPARQL XML (rendered as XHTML)</OPTION>
          <option value="csv">SPARQL XML (rendered as tab delimited)</OPTION>
          <!--option value="csv-pure">Tab delimited</OPTION-->
          <option value="json">JSON</OPTION>
          <option value="tsv">Tab-separated values (TSV)</OPTION>
          <option value="csv-spec">Comma-separated values</OPTION>
        </select>
        <input type="button" value="Submit SPARQL" onClick="submitQuery('queryform')" />
        </div>
      </form>
    </div>
    <!--h3>SPARQL Queries to Manage</h3>
    <table width='100%%' style='font:10pt arial,sans-serif;'>
        <tr>
          <th width=50%%' align='left'>Query name</th>
          <th align='left'>Query last modification date</th>
          <th align='left'>Date last run</th>
          <th align='left'>Number of results</th>
        </tr>
        QUERIES
    </table-->
    <hr />
    <table style='font:8pt arial,sans-serif;'>
      <thead>
          <tr><td colspan='2'>Preset namespace bindings</td></tr>
      </thead>
      <tbody>BINDINGS</tbody>
    </table>
    <div style="font-size: 10pt; margin: 0 1.8em 1em 0; text-align: center;">Powered by <a href="http://codemirror.net/">CodeMirror</a>, <a href="http://code.google.com/p/python-dlp/wiki/LayerCakePythonDivergence">layercake-python</a> (<em><strong>RDF</strong></em>), and <a href="http://code.google.com/p/akamu">Akamu</a> (<em><strong>HTTP</strong></em> &amp; <em><strong>XML</strong></em>)</div>
  </body>
</html>
"""

# HTML fragment wiring in the CodeMirror assets; the five %s slots are
# (in order): codemirror.js, matchbrackets.js, sparql.js, docs.css and
# codemirror.css, each resolved against the service root at request time.
CODEMIRROR_SETUP=\
"""
<script
    src="%s"
    type="text/javascript">
</script>
<script src="%s" type="text/javascript"></script>
<script src="%s" type="text/javascript"></script>
<style type="text/css">
  .CodeMirror {border-top: 1px solid black; border-bottom: 1px solid black;}
  .activeline {background: #f0fcff !important;}
</style>
<link rel="stylesheet" type="text/css" href="%s"/>
<link rel="stylesheet" type="text/css" href="%s"/>
"""
#     return ProtocolHandler(environ, start_response)
# else:
#     return FormHandler(environ, start_response)

# Return values a decorated service yields to select which handler
# (see sparql_rdf_protocol.__call__) serves the request
PROTOCOL_HANDLER = 0
FORM_HANDLER     = 1
QUERY_MANAGER    = 2

class ConfigurationManager(object):
    """
    Stores global configuration and provides a method for retrieving
    the underlying SPARQL service graph
    """
    def __init__(self,
                 global_conf,
                 nsBindings = {},
                 defaultDerivedPreds = [],
                 litProps = None,
                 resProps = None,
                 definingOntology = None,
                 ontGraph = None,
                 ruleSet = None,
                 builtinTemplateGraph = None):
        self.builtinTemplateGraph = builtinTemplateGraph
        self.ruleSet = ruleSet
        self.definingOntology   = definingOntology
        self.ontGraph           = ontGraph
        self.store_id           = global_conf.get('store_identifier')
        self.connection         = global_conf.get('connection')
        self.storeKind          = global_conf.get('store')
        self.layout             = global_conf.get('graphVizLayout')
        self.vizualization      = global_conf.get('visualization')
        self.endpoint           = global_conf['endpoint']
        self.litProps           = litProps
        self.resProps           = resProps
        self.nsBindings         = nsBindings
        self.defaultDerivedPreds= defaultDerivedPreds
        self.entailmentN3       = global_conf.get('entailment_n3')
        self.dataStoreOWL       = global_conf.get('datastore_owl')
        self.topDownEntailment  = global_conf.get('topDownEntailment',False)
        self.debugQuery         = global_conf.get('debugQuery',False)
        self.ignoreBase         = global_conf.get('NO_BASE_RESOLUTION',False)
        self.ignoreQueryDataset = global_conf.get('IgnoreQueryDataset',False)
        MYSQL_ORDER             = global_conf.get('MYSQL_ORDER',False)
        noFilterEstimation      = global_conf.get('DISABLE_SELECTION_ESTIMATION',False)
        self.proxy              = global_conf.get('sparql_proxy')
        self.bNodeAsURI         = global_conf.get('bNodeAsURI')
        self.manageQueries      = global_conf.get('manageQueries')
        self.queryManager       = global_conf.get('queryMgr')
        self.endpointURL        = global_conf.get('endpointURL')

        if self.proxy:
            print "A proxy SPARQL server for ", self.proxy
        elif MYSQL_ORDER or noFilterEstimation:
            #modification to the SPARQL evaluation methods
            from rdflib.sparql.sql.RdfSqlBuilder import DEFAULT_OPT_FLAGS, \
                OPT_JOIN_GREEDY_STOCKER_STATS, OPT_JOIN_GREEDY_SELECTION
            if MYSQL_ORDER:
                DEFAULT_OPT_FLAGS[OPT_JOIN_GREEDY_STOCKER_STATS]=False
                DEFAULT_OPT_FLAGS[OPT_JOIN_GREEDY_SELECTION]    =False
                assert not noFilterEstimation,\
                "Cannot use both MYSQL_ORDER and DISABLE_SELECTION_ESTIMATION!"
            elif noFilterEstimation:
                DEFAULT_OPT_FLAGS[OPT_JOIN_GREEDY_SELECTION]    =False

        from rdflib.sparql import Algebra
        Algebra.DAWG_DATASET_COMPLIANCE = False

    def buildGraph(self,default_graph_uri=None):
        if self.proxy:
            store = plugin.get('SPARQL',Store)(self.proxy,bNodeAsURI = self.bNodeAsURI)
        else:
            store = plugin.get(self.storeKind,Store)(self.store_id)
            store.open(self.connection,create=False)
            #The MySQL store has a special set of attribute for optimizing
            #SPARQL queries based on the characteristics of RDF properties
            #used in the queries
            if self.storeKind == 'MySQL' and self.dataStoreOWL:
                print "Updating the property optimization parameters to the store"
                store.literal_properties = self.litProps
                store.resource_properties= self.resProps
        if default_graph_uri:
            targetGraph = Graph(store,identifier = URIRef(default_graph_uri))
        else:
            targetGraph = ConjunctiveGraph(store)

        return targetGraph

class sparql_rdf_protocol(ConfigurationManager):
    """
    Prepares a Triclops WSGI application for use to wrap
     the Akara via the 'wsgi_wrapper' keyword argument of
     @simple_service and @service

    The first argument is the path used with @simple_service or @service and
    datasetName is the name of the Akamu graph store in the configuration that the
    protocol uses for the queries.

    The decorated akamu service implementation returns one of:
        - PROTOCOL_HANDLER
        - FORM_HANDLER

    to specify whether the service is for the protocol or the SPARQL form

    """
    def __init__(self, root, datasetName, absoluteRoot = '/'):
        # Baseline namespace prefixes always available to queries
        self.nsBindings    = {  u'owl' :OWL.OWLNS,
                                u'rdf' :RDF.RDFNS,
                                u'rdfs':RDFS.RDFSNS}
        self.litProps = set()
        self.resProps = set()
        self.absoluteRoot       = absoluteRoot
        self.root               = root
        self.datasetName        = datasetName
        # ConfigureTriclops returns (configuration mapping, extra namespace
        # bindings); litProps/resProps are passed in to be populated
        self.conf,nsBindings    = ConfigureTriclops(
                                    self.datasetName,
                                    self.nsBindings,
                                    self.litProps,
                                    self.resProps)
        self.nsBindings.update(nsBindings)
        super(sparql_rdf_protocol, self).__init__(
            self.conf,
            self.nsBindings,
            [],
            self.litProps,
            self.resProps,
            Graph(),
            Graph(),
            set(),
            Graph())

    def __call__(self, func):

        # Renders the HTML SPARQL query form (GET only); written as a
        # generator, hence the yield/return pairs.
        def FormHandler(environ, start_response):
            req = Request(environ)
            reqMeth = req.method
            if reqMeth != 'GET':
                rt = 'SPARQL query form must be retrieved via GET!'
                status = '405 Method Not Allowed'
                response_headers = [
                    ('Content-type'  , 'text/plain'),
                    ('Content-Length', len(rt))
                ]
                start_response(status, response_headers)
                yield rt
                return

            status = '200 OK'
            # Render the preset prefix/namespace table rows
            bindingsHTML=''.join(['<tr><td>%s</td><td>%s</td></tr>'%(prefix,uri)
                                  for prefix,uri in self.nsBindings.items()])
            # Fill the SPARQL_FORM placeholder tokens
            retVal=SPARQL_FORM.replace('ENDPOINT',self.endpoint).replace(
                'BINDINGS',
                bindingsHTML)

            retVal=retVal.replace('CODEMIRROR',CODEMIRROR_SETUP%(
                os.path.join(self.absoluteRoot,'codemirror/lib/codemirror.js'),
                os.path.join(self.absoluteRoot,'codemirror/addon/edit/matchbrackets.js'),
                os.path.join(self.absoluteRoot,'codemirror/mode/sparql/sparql.js'),
                os.path.join(self.absoluteRoot,'codemirror/doc/docs.css'),
                os.path.join(self.absoluteRoot,'codemirror/lib/codemirror.css')
            ))
            # Fills the remaining lone %s in the template (inside
            # commented-out markup) and collapses the %% escapes
            retVal=retVal%(self.endpoint)
            entailmentRepl=''

            # NOTE(review): dead branch -- disabled via 'if False'; it
            # references names (bindery, createDigest, U) not defined in
            # this module, so it would fail if re-enabled as-is.
            if False:#self.manageQueries:# @@TODO: Query management
                entries = []
                for fN in sorted([_fname
                                  for _fname in os.listdir(self.manageQueries)
                                 ]):
                    fName = os.path.join(self.manageQueries,fN)
                    fObj = open(fName)
                    doc = bindery.parse(fObj.read())
                    _id=createDigest(U(doc.Query.name)).hexdigest()
                    resultFName = os.path.join(self.manageQueries,
                        '%s.rq.results.xml'
                    )%_id
                    entries.append(
                        QUERY_LIST_ENTRY%
                        (os.path.join(self.queryManager),
                         createDigest(U(doc.Query.name)).hexdigest(),
                         U(doc.Query.name).encode('ascii'),
                         time.ctime(os.lstat(fName).st_mtime),
                         time.ctime(os.lstat(resultFName).st_mtime)
                         if os.path.exists(resultFName) else 'N/A',
                         '<a href="%s?action=update&query=%s&innerAction=load">%s</a>'%(
                             self.queryManager,
                             _id,
                             int(bindery.parse(resultFName).xml_select(
                                 'count(/*[local-name()="sparql"]/*[local-name()="results"]/*)'))
                             ) if os.path.exists(resultFName) else 'N/A')
                    )
                retVal=retVal.replace('QUERIES','\n'.join(entries))

            # if self.topDownEntailment:
            #     entailmentRepl = '<div><em>This server has an <strong><a href="%s/entailment">active</a></strong> entailment regime!</em></div><br/>'%(
            #         self.endpoint)
            retVal=retVal.replace('ENTAILMENT','')

            # retVal=retVal.replace('<!--CancelButton-->',
            #     '<input type="button" value="\'Prepare\' Query" onClick="getTicket(\'queryform\')"></input>')
            response_headers = [('Content-type','text/html'),
                ('Content-Length',
                 len(retVal))]
            start_response(status, response_headers)
            yield retVal
            return

        # Implements the SPARQL protocol endpoint proper: service
        # description for a bare GET, DESCRIBE short-circuit, and full
        # query parsing/evaluation/serialization otherwise.
        def ProtocolHandler(environ, start_response):
            req = Request(environ)
            d                 = req.params
            query             = d.get('query')
            ticket            = d.get('ticket')
            default_graph_uri = d.get('default-graph-uri')
            rtFormat          = d.get('resultFormat')

            # Iterating the params MultiDict yields one key per submitted
            # value, so repeated 'query' parameters show up more than once
            if 'query' in d and len(filter(lambda i:i == 'query',d))>1:
                rt = "Malformed SPARQL Query: query parameter provided twice"
                status = '400 Bad Request'
                response_headers = [('Content-type','text/plain'),
                                    ('Content-Length',
                                     len(rt))]
                start_response(status,response_headers)
                return rt

            # POST may carry the query as the raw body or form-encoded
            if req.method == 'POST':
                if req.content_type == 'application/sparql-query':
                    query = req.body
                elif req.content_type == 'application/x-www-form-urlencoded':
                    query = req.POST.get('query')

            print "## Query ##\n", query, "\n###########"
            print "Default graph uri ", default_graph_uri
            requestedFormat = environ.get('HTTP_ACCEPT','application/rdf+xml')

            if req.method == 'POST':
                assert query,"POST can only take an encoded query or a query in the body"
            elif req.method == 'GET' and not query:
                # Bare GET without a query: answer with a SPARQL 1.1
                # service description document
                if requestedFormat not in SD_FORMATS:
                    requestedFormat = 'application/rdf+xml'
                if self.ignoreQueryDataset:
                    targetGraph = self.buildGraph(default_graph_uri)
                else:
                    targetGraph = self.buildGraph(default_graph_uri=None)

                sdGraph = Graph()

                SD_NS  = Namespace('http://www.w3.org/ns/sparql-service-description#')
                SCOVO  = Namespace('http://purl.org/NET/scovo#')
                VOID   = Namespace('http://rdfs.org/ns/void#')
                FORMAT = Namespace('http://www.w3.org/ns/formats/')

                sdGraph.bind(u'sd',SD_NS)
                sdGraph.bind(u'scovo',SCOVO)
                sdGraph.bind(u'void',VOID)
                sdGraph.bind(u'format',FORMAT)

                service     = BNode()
                datasetNode = BNode()
                if self.endpointURL:
                    sdGraph.add((service,SD_NS.endpoint,URIRef(self.endpointURL)))
                sdGraph.add((service,SD_NS.supportedLanguage        ,SD_NS.SPARQL10Query))
                sdGraph.add((service,RDF.type                       ,SD_NS.Service))
                sdGraph.add((service,SD_NS.defaultDatasetDescription,datasetNode))
                sdGraph.add((service,SD_NS.resultFormat,FORMAT['SPARQL_Results_XML']))
                sdGraph.add((datasetNode,RDF.type,SD_NS.Dataset))

                # Advertise every named graph with its triple count
                for graph in targetGraph.store.contexts():
                    graphNode  = BNode()
                    graphNode2 = BNode()
                    sdGraph.add((datasetNode,SD_NS.namedGraph,graphNode))
                    sdGraph.add((graphNode,SD_NS.name,URIRef(graph.identifier)))
                    sdGraph.add((graphNode,SD_NS.graph,graphNode2))
                    sdGraph.add((graphNode,RDF.type,SD_NS.NamedGraph))
                    sdGraph.add((graphNode2,RDF.type,SD_NS.Graph))
                    noTriples = Literal(len(graph))
                    sdGraph.add((graphNode2,VOID.triples,noTriples))
                doc = sdGraph.serialize(
                    format=MIME_SERIALIZATIONS[requestedFormat])
                status = '200 OK'
                response_headers = [
                    ('Content-type'  , requestedFormat),
                    ('Content-Length', len(doc))
                ]
                start_response(status,response_headers)
                return doc
            else:
                assert req.method == 'GET',"Either POST or GET method!"
            if self.ignoreQueryDataset:
                self.targetGraph = self.buildGraph(default_graph_uri)
            else:
                self.targetGraph = self.buildGraph(default_graph_uri=None)

            origQuery = query
            # Short-circuit simple 'DESCRIBE <iri>' queries: serialize the
            # triples where the IRI appears as subject or object
            describePattern=re.compile(r'DESCRIBE\s+\<(?P<iri>[^\>]+)\>',re.DOTALL)
            describeQueryMatch = describePattern.match(query)
            if describeQueryMatch:
                iri=URIRef(describeQueryMatch.group('iri'))
                g=Graph()
                for p,u in self.targetGraph.namespaces():
                    g.bind(p,u)
                for t in self.targetGraph.triples((None,None,iri)):
                    g.add(t)
                for t in self.targetGraph.triples((iri,None,None)):
                    g.add(t)
                rt=g.serialize(format='pretty-xml')
                status = '200 OK'
                response_headers = [('Content-type','application/rdf+xml'),
                                    ('Content-Length',
                                     len(rt))]
                start_response(status,response_headers)
                return rt
            try:
                query=sparql_parser.parse(query)
            except pyparsing.ParseException, e:
                rt = "Malformed SPARQL Query: %s"%repr(e)
                status = '400 Bad Request'
                response_headers = [('Content-type','text/plain'),
                                    ('Content-Length',
                                     len(rt))]
                start_response(status,response_headers)
                return rt

            start = time.time()

            # Optionally strip BASE declarations / dataset clauses from the
            # parsed query per configuration
            if self.ignoreBase and hasattr(query,'prolog') and query.prolog:
                query.prolog.baseDeclaration=None
            if self.ignoreQueryDataset and hasattr(query.query,'dataSets') and query.query.dataSets:
                print "Ignoring query-specified datasets: ", query.query.dataSets
                query.query.dataSets = []

            if not self.proxy and ticket:
                #Add entry for current thread in ticket -> thread id lookup
                # NOTE(review): ticketLookup is not defined anywhere in this
                # module -- presumably expected from another module's
                # globals; this raises NameError as written. Confirm.
                global ticketLookup
                ticketLookup[ticket]=self.targetGraph.store._db.thread_id()

                #Run the actual query
            rt = self.targetGraph.query(origQuery,
                initNs=self.nsBindings,
                DEBUG=self.debugQuery,
                parsedQuery=query)
            print "Time to execute SPARQL query: ", time.time() - start
            qRT = rt.serialize(format='xml')
            self.targetGraph.close()
            print "Time to execute and seralize SPARQL query: ", time.time() - start
            print "# of bindings: ", rt.noAnswers

            # Serialize the result set in the requested format.
            # NOTE(review): an unrecognized rtFormat (e.g. 'csv-pure')
            # falls through every branch leaving imt unbound -> NameError.
            if rtFormat in ['xml','csv'] or not rtFormat:
#                from amara.bindery import parse
#                doc = parse(src)
#                pi = doc.xml_processing_instruction_factory(
#                    u"xml-stylesheet",
#                    u'href="%s" type="text/xsl"'%self.transform)
#                doc.xml_insert(0,pi)
#                return doc
                rt = qRT
                # imt='application/sparql-results+xml'
                # imt='application/xml'
                doc=amara.tree.parse(rt)
                fName = 'xml-to-html.xslt' if rtFormat == 'xml' else 'xml-to-csv.xslt'
                stylesheetPath = os.path.join(self.conf['akamu_xslt'],fName)
                imt='application/xml'
                pi = doc.xml_processing_instruction_factory(
                    "xml-stylesheet",
                   "type='text/xml' href='%s'"%stylesheetPath)
                # pi = rtDoc.createProcessingInstruction("xml-stylesheet",
                #    "type='text/xml' href='%s'"%stylesheetPath)
                #Add a stylesheet instruction to direct browsers how to render the result document
                doc.xml_insert(0,pi)
                # rtDoc.insertBefore(pi, rtDoc.documentElement)
                out = StringIO()
                doc.xml_write(stream=out)
                # PrettyPrint(rtDoc, stream=out)
                rt = out.getvalue()
            elif rtFormat == 'json':
                rt = GetResultFormats(
                    rt,
                    self.conf['sparql_result_xslt'],
                    result_format=RESULT_FORMAT.JSON
                )
                imt='application/sparql-results+json'
            elif rtFormat == 'tsv':
                rt = GetResultFormats(
                    rt,
                    self.conf['sparql_result_xslt'],
                    result_format=RESULT_FORMAT.TSV
                )
                imt='text/tab-separated-values'
            elif rtFormat == 'csv-spec':
                rt = GetResultFormats(
                    rt,
                    self.conf['sparql_result_xslt'],
                    result_format=RESULT_FORMAT.CSV
                )
                imt='text/csv'
            status = '200 OK'
            response_headers = [('Content-type',imt),
                                ('Content-Length',len(rt))]
            start_response(status, response_headers)
            return rt

        # The wrapped service picks the handler by returning one of the
        # PROTOCOL_HANDLER / FORM_HANDLER constants
        service_map = {
            PROTOCOL_HANDLER    : ProtocolHandler,
            FORM_HANDLER        : FormHandler,
        }
        def Handler(environ, start_response):
            return service_map[func(environ, start_response)](environ,
                                                              start_response)
        return Handler