# -*- coding:utf-8 -*-
import MySQLdb
import sys
import urllib,urllib2,cookielib,time
#reload(sys)
#sys.setdefaultencoding('utf-8')
# Connect to the local 'pachong' (crawler) database.
# NOTE(review): credentials are hard-coded in source -- consider moving them
# to a config file or environment variables.
cxn=MySQLdb.connect(db='pachong',host='localhost',user='root',passwd='123456ms')
# Force utf8 on the connection so non-ASCII URLs round-trip correctly.
cxn.set_character_set('utf8')
cur=cxn.cursor()

# Candidate encodings tried in order when decoding raw bytes pulled from the
# database: utf-8 first, then the common Chinese codecs, then cp1252.
decode_list=['utf-8', 'gb2312', 'gbk', 'gb18030', 'cp1252', ]

def decode_data(data):
    """Decode a byte string by trying each codec in decode_list in order.

    Returns (unicode_text, codec_name) for the first codec that succeeds,
    or (u'', '') when every codec fails.
    """
    for coding in decode_list:
        try:
            return data.decode(coding), coding
        except (UnicodeDecodeError, LookupError):
            # Wrong codec for these bytes -- try the next candidate.
            # (The original bare `except` also swallowed KeyboardInterrupt
            # and every other unrelated error.)
            continue
    return u'', ''

# Route all HTTP traffic through a local proxy (127.0.0.1:8087, the GoAgent
# default -- TODO confirm) and keep cookies across requests.
proxy_support = urllib2.ProxyHandler({'http':'127.0.0.1:8087'})
cook_jar=cookielib.CookieJar()
cookie_support=urllib2.HTTPCookieProcessor(cook_jar)
opener=urllib2.build_opener(proxy_support,cookie_support,urllib2.HTTPHandler)
# Install globally so every plain urllib2.urlopen() call below uses the
# proxy and the shared cookie jar.
urllib2.install_opener(opener)

# Masquerade as Baiduspider; sites rarely block the Baidu crawler's UA.
user_agents='Baiduspider+(+http://www.baidu.com/search/spider.htm)'
headers={
            'User-Agent':user_agents,
            'Accept-Language':'zh-cn,zh;q=0.8,en-us;q=0.5,en;q=0.3',
            #'Referer':'http://www.51cto.com/'
        }

# Name of the crawl table whose URLs will be validated / pruned.
table_name='iteye'

def is_deny_url(url):
    url=url.strip()
    try:
        res = urllib2.urlopen(urllib2.Request(url,headers=headers),timeout=10)
        res.close()
        if url.strip() == res.url.strip():                      #不保存重定向的网页
            return False
        else:
            return True
##    except urllib2.HTTPError,e:
##        if e.code==404 and e.code==500:
##            self.engine.logger.error(err_msg)
##        print str(e)
##    except urllib2.URLError,e:
##        print str(e)
    except Exception,e:
        print str(e)
    return False

def del_url(table_name):
    print table_name
    sql='select min(id),max(id) from %s'%table_name
    cur.execute(sql)
    min_id,max_id=cur.fetchone()            
    print min_id,max_id
    record_num=0
    step=10**6
    idx=min_id
    while idx<=max_id:
        sql_pref='select id,url from %s'%table_name
        sql=sql_pref+' where id between %s and %s'
        cur.execute(sql,(str(idx),str(idx+step)))
        for record in cur.fetchall():
            url=record[1]
            url=decode_data(url)[0]
            if is_deny_url(url):
                #print record[1]
                sql_prefix='delete from %s'%table_name
                sql=sql_prefix+' where id=%s'
                cur.execute(sql,str(record[0]))
                record_num+=1
                if not record_num%10**4:
                    print record_num
        cxn.commit()
                
        idx+=step
    print record_num
# Driver: prune every listed table, then flush remaining work and release
# the database resources.  (The *_queue companion table is currently
# excluded -- see the commented-out entry.)
table_list=[table_name,]#'%s_queue'%table_name]
for table in table_list:
    del_url(table)
    
# Final commit is a safety net; del_url() already commits per window.
cxn.commit()
cur.close()
cxn.close()


print 'end'
