#!/usr/bin/python

from __future__ import with_statement
from cStringIO import StringIO
#from contrib.sphinxapi import *
import sqlalchemy as sa
from Queue import Queue
from threading import Thread, Semaphore
import threading
from datetime import datetime, date
from time import strftime
import re, traceback, calendar, time, sys

#from local
from lib.database_qzone import Database, g_pagesize
from lib.html_extractor import extract_text
from lib.utils import sanitizestring, transcode3, transcode5
from xml.sax.saxutils import escape
from lib.network import clean_url, fetch_url3
import MySQLdb

workersemaphore = Semaphore(16)

def fixup():
    dal = Database()
    cursor = 0
    workingset = []
    i = 0

    while True:
        workingset = dal._queryUser(i)
        if not workingset:
            return False
        for u in workingset:
            print '"', u[7], '"'
            uid = u[0]
            workerthread = Thread(target = createWorker(dal, uid))
            workersemaphore.acquire()
            workerthread.start()
        i += 1
        

def createWorker(dal, uid):
    def worker():
        print uid
        blogs = dal._queryUserBlog(uid)
        # if there are no bookmarks by user, let's forget it.
        if not blogs:
            workersemaphore.release()
            return

        filters = {}
        for b in blogs:
            if filters.has_key(b[0]):
                filters[b[0]].append(b[4])
            else:
                filters[b[0]] = [b[4]]
        if filters:
            for k,v in filters.iteritems():
                if v and len(v) > 1:
                    allthesame = True
                    for val in v:
                        if val != max(v):
                            allthesame = False
                            break
                    if allthesame:
                        dal.deleteBlog(uid, k, max(v), len(v) - 1)
                    else:
                        dal.deleteBlog(uid, k, max(v))
        workersemaphore.release()

    return worker

# Only run the dedup job when executed as a script, not on import.
if __name__ == '__main__':
    fixup()
