#!/usr/bin/env python
# -*- coding: utf-8 -*-

from sphinxapi import *
import sys, time
import conf
import log

# --- Sphinx search configuration -------------------------------------------
# Match mode: SPH_MATCH_ALL requires every query word to be present.
mode = SPH_MATCH_ALL
# '*' queries every index known to searchd.
index = '*'
# Attribute column to filter on; value comes from the project config module.
filtercol = conf.search_key
# Empty list -> the SetFilter call below is skipped (no attribute filter).
filtervals = []
# Empty string -> default relevance sort (SetSortMode call is skipped).
sortby = ''
# Empty string -> no grouping (SetGroupBy call is skipped).
groupby = ''
groupsort = '@group desc'
# 0 -> default result limits (SetLimits call is skipped).
limit = 0

# do query
# Single module-level client, configured once; find_urlid() reuses it.
cl = SphinxClient()
cl.SetServer ( conf.search_host, conf.search_port )
# Per-field weights: first full-text field weighted 100, second weighted 1.
cl.SetWeights ( [100, 1] )
cl.SetMatchMode ( mode )
# NOTE(review): with the defaults above, all four guards below are false,
# so none of these optional settings is actually applied.
if filtervals:
    cl.SetFilter ( filtercol, filtervals )
if groupby:
    cl.SetGroupBy ( groupby, SPH_GROUPBY_ATTR, groupsort )
if sortby:
    cl.SetSortMode ( SPH_SORT_EXTENDED, sortby )
if limit:
    cl.SetLimits ( 0, limit, max(limit,1000) )


def find_urlid( s ) :
    """Run a Sphinx full-text query for *s* and return the matched ids.

    Uses the pre-configured module-level ``cl`` client and queries ``index``.
    For every match, every attribute value is converted with ``long()`` and
    appended to the result list.

    NOTE(review): the append happens for *each* attribute of each match; if
    an index exposes more than one attribute (e.g. a timestamp alongside the
    url id), non-id values end up in the result too -- confirm the searched
    indexes expose only the url-id attribute.

    :param s: query string (multiple words allowed; SPH_MATCH_ALL semantics).
    :return:  list of long ids; empty list when the query fails.
    """
    ids = []
    res = cl.Query ( s, index )
    if not res:
        # Hard failure (e.g. searchd unreachable) -- log and return empty.
        log.w( 'query failed: %s' % cl.GetLastError() )
        return ids
    if cl.GetLastWarning():
        log.w( 'WARNING: %s\n' % cl.GetLastWarning() )
    log.i( 'Query stats: \'%s\' retrieved %d of %d matches in %s sec' % (s, res['total'], res['total_found'], res['time']) )
    # Per-word hit statistics, when searchd returned them.
    # (has_key() is deprecated; 'in' works on both Python 2 and 3.)
    if 'words' in res:
        for info in res['words']:
            log.i( '\t\'%s\' found %d times in %d documents' % (info['word'], info['hits'], info['docs']) )
    if 'matches' in res:
        log.i( '\nMatches:' )
        for n, match in enumerate( res['matches'], 1 ):
            attrsdump = ''
            # res['attrs'] entries are (name, type) pairs.
            for attrname, attrtype in res['attrs']:
                value = match['attrs'][attrname]
                # Collect the raw value before it is reformatted for logging.
                ids.append( long(value) )
                if attrtype == SPH_ATTR_TIMESTAMP:
                    # Render timestamps human-readably in the log dump only.
                    value = time.strftime ( '%Y-%m-%d %H:%M:%S', time.localtime(value) )
                attrsdump = '%s, %s=%s' % ( attrsdump, attrname, value )
            log.i( '%d. doc_id=%s, weight=%d%s' % (n, match['id'], match['weight'], attrsdump) )
    return ids


if __name__ == "__main__" :
    s = '爱情 抱枕'
    ids = search( s )
    print ids

