# coding:utf-8
# write  by  zhou

import gevent
from gevent.monkey import  patch_all;patch_all()
from celery import Celery
import urllib2
import time
import datetime
import sys
import urlparse
import hashlib

from qiniu import Auth, BucketManager
import redis
import json
import base64
import random
import traceback
import ssl
# Global SSL context that skips certificate verification -- image hosts
# frequently present broken certs.  NOTE(review): this disables TLS
# validation wherever this context is used.
context = ssl._create_unverified_context()
# Python 2 only: force the process-wide default encoding to UTF-8 so
# implicit str<->unicode conversions do not raise UnicodeDecodeError.
reload(sys)
sys.setdefaultencoding("utf-8")

# URL de-duplication built from hash functions + md5 + a Bloom filter,
# intended for very large data volumes.

# Approach: hash functions + md5 turn each string into a unique fingerprint;
# the Bloom filter compresses storage by keeping only bit positions.
class SimpleHash(object):
    """One hash function of the Bloom-filter family.

    Each instance is parameterised by a capacity and a multiplier seed,
    so a list of seeds yields a list of independent hash functions.
    """

    def __init__(self, cap, seed):
        self.cap = cap    # size (in bits) of the target bit array
        self.seed = seed  # multiplier that differentiates this hash function

    def hash(self, value):
        """Map *value* to a bit position in ``[0, cap)``."""
        acc = 0
        for ch in value:
            # Fold each character code in, scaled by the seed each round.
            acc += self.seed * acc + ord(ch)
        # Mask down to the bit-array size (cap is a power of two).
        return (self.cap - 1) & acc

# Bloom filter
class BloomFilter(object):
    """Redis-backed Bloom filter for de-duplicating strings (image paths).

    A value is first reduced to its md5 hex digest, then mapped through
    several SimpleHash functions onto bits of Redis string keys
    (SETBIT / GETBIT).  ``blockNum`` shards the bit array over several
    keys, selected from the first two hex digits of the digest.
    """

    def __init__(self, server, key, blockNum=1):
        # 1 << 31 bits == 256 MB per Redis string (Redis caps strings at 512 MB).
        self.bit_size = 1 << 31
        # Five seeds -> five independent hash functions.
        self.seeds = [5, 7, 11, 13, 31]
        # self.seeds = [5, 7, 11, 13, 31, 37, 61]
        self.server = server
        self.key = key
        self.blockNum = blockNum
        self.hashfunc = [SimpleHash(self.bit_size, seed) for seed in self.seeds]

    def isContains(self, str_input):
        """Return True if *str_input* is probably present, False if definitely absent."""
        str_input = hashlib.md5(str_input).hexdigest()
        if not str_input:
            # Unreachable in practice: hexdigest() is never empty.
            return False
        name = self.key + str(int(str_input[0:2], 16) % self.blockNum)
        present = True
        for func in self.hashfunc:
            # Every probed bit must be 1; a single 0 proves absence.
            present = present & self.server.getbit(name, func.hash(str_input))
        return bool(present)

    def insert(self, str_input):
        """Mark *str_input* as seen by setting each of its probe bits to 1."""
        str_input = hashlib.md5(str_input).hexdigest()
        name = self.key + str(int(str_input[0:2], 16) % self.blockNum)
        for func in self.hashfunc:
            self.server.setbit(name, func.hash(str_input), 1)


# Module-level singletons: the Redis connection (db 6) and the shared Bloom
# filter used by the crawler task below.  NOTE(review): rebinding the name
# `redis` shadows the imported redis module from this point on.
redis = redis.Redis("192.168.8.40",db=6)
bloom = BloomFilter(redis,"image_bloom_filter_qiniu",10)


def md5(str, hex=True):
    """Return the md5 checksum of *str*.

    Text input is UTF-8 encoded before hashing, so both text and byte
    strings are accepted.  With hex true (default) the 32-character hex
    digest is returned, otherwise the raw 16-byte digest.

    NOTE: the parameter names shadow builtins but are kept for backward
    compatibility with keyword-argument callers.
    """
    m = hashlib.md5()
    # hashlib only digests bytes; encode text transparently.
    if not isinstance(str, bytes):
        str = str.encode("utf-8")
    m.update(str)
    if hex:
        return m.hexdigest()
    return m.digest()


# Celery application shared by the task below.  Task results are discarded
# and queues accept message priorities up to 255.
app = Celery()
app.conf.task_ignore_result = True
app.conf.task_queue_max_priority = 255

def qiniu_spider_man(url,key):
    """Ask Qiniu to fetch *url* into the 'imgse' bucket, stored under *key*.

    Returns the ``(ret, info)`` pair produced by ``BucketManager.fetch``.
    NOTE(review): credentials are hard-coded in source; consider moving
    them to configuration.
    """
    access_key = 'e6NsTpfl6Ctf3vWW8k1pJjSBnQ0-WvLgPJroCxiP'
    secret_key = 'C14vJPkeng3dIEVL0mH0GjfLz4KL7SmyhD9y64cj'
    bucket_name = 'imgse'
    auth = Auth(access_key, secret_key)
    manager = BucketManager(auth)
    # Qiniu object keys must not begin with '/', so strip leading slashes.
    return manager.fetch(url, bucket_name, key.lstrip("/"))


def write_to_hbase(table, row_key, column, data):
    """Store *data* into the cell (*row_key*, *column*) of HBase *table*
    via the HBase REST gateway at 192.168.14.2:8080.

    Returns True on success; urllib2 errors propagate to the caller.
    """
    def _b64(value):
        # The REST API expects base64 text; accept text or bytes input.
        if not isinstance(value, bytes):
            value = value.encode("utf-8")
        return base64.b64encode(value).decode("ascii")

    req = urllib2.Request("http://192.168.14.2:8080/%s/a" % table)
    req.add_header("Content-Type", "application/json")
    req.add_header("Accept", "application/json")
    # urllib2 has no native PUT support; override the method hook.
    req.get_method = lambda: "PUT"
    # Build the request body in a new name instead of clobbering the
    # `data` parameter (the original shadowed it).
    body = json.dumps({"Row": [{"key": _b64(row_key),
                                "Cell": [{"column": _b64(column),
                                          "$": _b64(data)}]}]})
    req.data = body
    # timeout=None means block indefinitely -- kept for compatibility.
    rsp = urllib2.urlopen(req, timeout=None)
    rsp.close()
    return True


def img_url_handle(img_url):
    """Normalise an image URL's extension.

    Source URLs often carry junk after the extension (e.g. ``a.jpg!large``
    or ``a.png?x=1``).  Split on the last dot and, when the trailing part
    starts with a known image extension, return the URL truncated to that
    extension.  Unknown extensions or malformed URLs are returned
    unchanged.
    """
    try:
        head, _, tail = img_url.rpartition(".")
        # Bug fix: the original tested ``tail.startswith(".JPEG")`` and
        # ``tail.startswith(".JPG")`` -- but *tail* is the text AFTER the
        # dot, so those branches could never match.
        for ext in ("jpg", "jpeg", "png", "gif", "JPEG", "JPG"):
            if tail.startswith(ext):
                return head + "." + ext
        return img_url
    except Exception:
        # Defensive fallback, matching the original's behaviour of
        # returning the input untouched on any error.
        return img_url

@app.task(ignore_result=True)
def crawl_to_qiniu(url,save_path, timeout=12,
                  retries=10,**kwargs):
    "图片爬虫,爬取到的数据进行base64编码"
    local_info = locals()
    try:
        if save_path.endswith(".html"):
            return
        if save_path.endswith(".xml"):
            return
        if bloom.isContains(save_path):
            return
        print (url, save_path)
        url_object = urlparse.urlparse(url)
        try:
            write_to_hbase("spider_image_urls", save_path, "info:source_url", url)
        except:
            pass
        with gevent.Timeout(timeout,"spiderman-timeout") as timeout:
            task_result = qiniu_spider_man(url,save_path)
            try:
                write_to_hbase("spider_image_urls", save_path, "info:taskid", task_result[0])
            except:
                pass
        bloom.insert(save_path)
        print "Success %s"%url+ " " + "https://qiniu-imgse.cn.gcimg.net"+save_path+str(task_result)
    except (urllib2.URLError,Exception,BaseException) as exception:
        print "Exception %s %s || %s ||Kwargs:%s"%(save_path,url,str(exception),kwargs)
        #traceback.print_exc()
        if "SSL: TLSV1_ALERT_INTERNAL_ERROR" in  str(exception) or "crawl-timeout" in str(exception):
            return
        if isinstance(exception,urllib2.HTTPError):
            if exception.code == 404:
                return
        if retries >= 1:
            retries -= 1
            local_info["retries"] = retries
            while 1:
                try:
                    app.send_task("rawhttp.qiniu_image_spider.crawl_to_qiniu", kwargs=local_info,
                                          queue="rawhttp.qiniu_image_spider")
                except:
                    pass
                else:
                    break


