#!/usr/local/bin/python
#-*- encoding:utf-8 -*- 
 
import ConfigParser
import re

import jieba
import jieba.analyse
from whoosh.index import create_in
from whoosh.fields import *
#from chinesetokenizer  import ChineseAnalyzer
# Build the Whoosh schema and create a fresh index in the directory named by
# the `[bm25] BackupIndex` config key.  Chinese text is tokenized with
# jieba's analyzer; the `content` field additionally stores a term-frequency
# vector (used later for BM25-style scoring -- confirm against the reader).
analyzer = jieba.analyse.ChineseAnalyzer()
config = ConfigParser.ConfigParser()
config.read('config')
# NOTE(review): was `f` -- renamed so it is not shadowed by the corpus file
# handle opened further down; the unused second Frequency() (`g`) is dropped.
content_vector = formats.Frequency()
schema = Schema(appid=ID(stored=True),
                content=TEXT(stored=True, analyzer=analyzer, vector=content_vector),
                description=TEXT(stored=True, analyzer=analyzer))
ix = create_in(config.get('bm25', 'BackupIndex'), schema)

writer = ix.writer();
f = open(config.get('bm25', 'corpus'));
k = 0;
count = 0
for line in f:
    k += 1;
    print k;
    li = line.split('\t', 2);
    if len(li) != 3:
        continue;
    if li[2].strip() == '':
        description = u'the';
        count +=1;
        continue;
    else:
        description = li[2].strip().decode('utf-8');
        description = re.sub('\s', u'', description);
    if li[1].strip() == '':
        content = u' ';
        count +=1;
        continue;
    else:
        content = li[1].strip().decode('utf-8');
    writer.add_document(appid=li[0].decode('utf-8'), content=content, description=description);
print count; 
writer.commit()

f.close();

