#! /usr/bin/env python
# -*- coding: utf8 -*-

# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html

import MySQLdb
from gaodeha_crawl.items import PostItem
from scrapy.selector import Selector
from gaodeha_crawl.items import ImageItem
from gaodeha_crawl.common.db_utils import DbUtils
from scrapy import log
import urllib2
import os
import Image
import re
import time
import random

class BaseDbPipeline(object):
    """
        Abstract db pipeline: shared MySQL persistence helpers used by the
        concrete Scrapy pipelines below (posts go to tb_post, images to
        tb_image, via the DbUtils connection wrapper).
    """
    def __init__(self):
        # One shared connection wrapper per pipeline instance.
        self.db = DbUtils()


    def insertPost(self, item):
        """
            Insert a PostItem into tb_post.

            Updates are deliberately not allowed: an update could overwrite
            existing data such as comment_count or post_insert_time.
        """
        if not isinstance(item, PostItem):
            log.msg("not a PostItem" + str(item))
            return
        self.db.executeSql(self._buildSql(item, "tb_post"), dict(item))


    def insertImage(self, item):
        """
            Insert an ImageItem into tb_image.

            Returns the generated image_id, or None when the item is not
            an ImageItem.
        """
        if not isinstance(item, ImageItem):
            log.msg("not a ImageItem" + str(item))
            return
        return self.db.insertSql(self._buildSql(item, "tb_image"), dict(item))


    def insertComment(self, item):
        # Not implemented yet.
        pass


    def fetchCrawledUrls(self, website):
        # Not implemented yet.
        pass

    def _buildSql(self, item, tablename):
        """
            Build a parameterized INSERT over all declared fields of *item*,
            e.g.: insert into t (`a`,`b`) values (%(a)s,%(b)s)
            Values are bound later by the MySQLdb driver (pyformat style),
            never interpolated here, so this is injection-safe.
        """
        # Snapshot the field names once so the column list and the
        # placeholder list are guaranteed to be in the same order.
        columns = list(item.fields.keys())
        sql = "insert into %s (%s) values (%s)"
        return sql % (tablename,
                      ",".join(["`%s`" % column for column in columns]),
                      ",".join(["%(" + column + ")s" for column in columns]))

    def __del__(self):
        # __init__ may have failed before self.db was assigned (e.g. the
        # DB connection raised); guard so the destructor never raises.
        db = getattr(self, "db", None)
        if db is not None:
            db.close()
        


class PostDbPipeline(BaseDbPipeline):
    """
        Scrapy pipeline that persists crawled PostItems into tb_post.
    """

    def __init__(self):
        super(PostDbPipeline, self).__init__()
        # Use scrapy's log (consistent with the rest of this module)
        # instead of bare print debug statements.
        log.msg("PostDbPipeline __init__")


    def process_item(self, item, spider):
        """
            Scrapy pipeline hook: insert the post, then pass the item on
            to the next pipeline stage.
        """
        log.msg("insert item " + str(item))
        self.insertPost(item)
        return item


class ImageDbPipeline(BaseDbPipeline):
    """
        Scrapy pipeline that extracts <img> tags from a post's HTML
        content, downloads and stores each image in tb_image, and rewrites
        the tag so its src references the stored image id.
    """

    imgPattern = re.compile("<img[^>]+>")
    srcPattern = re.compile("src=['\"]([^\s'\"]+)['\"]")
    # NOTE: the alt pattern previously used a nested quantifier
    # ([^\s'\"]+)+ — a catastrophic-backtracking risk; a single + captures
    # exactly the same text.
    altPattern = re.compile("alt=['\"]([^\s'\"]+)['\"]")
    titlePattern = re.compile("title=['\"]([^\s'\"]+)['\"]")
    # Rotated per request so image downloads look like browser traffic.
    user_agent_list = [
        "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/22.0.1207.1 Safari/537.1",
        "Mozilla/5.0 (X11; CrOS i686 2268.111.0) AppleWebKit/536.11 (KHTML, like Gecko) Chrome/20.0.1132.57 Safari/536.11",
        "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.6 (KHTML, like Gecko) Chrome/20.0.1092.0 Safari/536.6",
        "Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.6 (KHTML, like Gecko) Chrome/20.0.1090.0 Safari/536.6",
        "Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/19.77.34.5 Safari/537.1",
        "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/536.5 (KHTML, like Gecko) Chrome/19.0.1084.9 Safari/536.5",
        "Mozilla/5.0 (Windows NT 6.0) AppleWebKit/536.5 (KHTML, like Gecko) Chrome/19.0.1084.36 Safari/536.5",
        "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1063.0 Safari/536.3",
        "Mozilla/5.0 (Windows NT 5.1) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1063.0 Safari/536.3",
        "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_0) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1063.0 Safari/536.3",
        "Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1062.0 Safari/536.3",
        "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1062.0 Safari/536.3",
        "Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.1 Safari/536.3",
        "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.1 Safari/536.3",
        "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.1 Safari/536.3",
        "Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.0 Safari/536.3",
        "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/535.24 (KHTML, like Gecko) Chrome/19.0.1055.1 Safari/535.24",
        "Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/535.24 (KHTML, like Gecko) Chrome/19.0.1055.1 Safari/535.24"
       ]

    def __init__(self):
        super(ImageDbPipeline, self).__init__()
        # log.msg (not print) for consistency with the rest of the module.
        log.msg("ImageDbPipeline __init__")


    def process_item(self, item, spider):
        """
            Scrapy pipeline hook: rewrite the images embedded in the
            post's content, then pass the item on.
        """
        return self.process_item_image(item)

    def process_item_image(self, item):
        """
        Extract each image referenced in the post content, store it
        (reusing the existing tb_image row when the origin URL was seen
        before), and replace the tag with <img alt="..." src="imageId">.
        Returns the item with its post_content rewritten.
        """
        content = item["post_content"]
        for m in re.finditer(self.imgPattern, content):
            img = m.group(0)
            msrc = re.search(self.srcPattern, img)
            if not msrc:
                log.msg("can not find src property, img=" + img)
                continue
            log.msg("img:" + img + "group(0)" + m.group(0))
            src = self.absolute_url(msrc.group(1), item["post_origin_url"])

            # Reset per tag: previously the last tag's alt leaked into
            # later tags that have neither alt nor title.
            alt = ""
            malt = re.search(self.altPattern, img)
            if not malt:
                # Fall back to the title attribute when alt is missing.
                malt = re.search(self.titlePattern, img)
            if malt:
                alt = malt.group(1)

            # Reuse an already-stored image for a previously seen URL.
            sql = "select image_id from tb_image where image_origin_url=%s"
            row = self.db.fetchOneDict(sql, (src,))
            if row and row["image_id"]:
                imageId = row["image_id"]
            else:
                imageId = self.store_image(src, alt)
            newImg = '<img alt="%s" src="%s">' % (alt, imageId)
            content = content.replace(img, newImg)
        item["post_content"] = content
        return item


    def absolute_url(self, link, url):
        """
            Resolve *link* against the page *url*.

            Absolute links pass through unchanged; host-relative links
            ("/...") are joined to the scheme+host of *url*. Any other
            relative form is logged and returned as-is.
        """
        if link.startswith("http://") or link.startswith("https://"):
            return link
        elif link.startswith("/"):
            # First "/" after the scheme's "//": starting the search past
            # "https://" skips the scheme slashes for both http and https.
            index = url.find("/", len("https://"))
            if index == -1:
                # url has no path component ("http://host"); the old
                # url[:index] with index == -1 chopped the host's last
                # character.
                return url + link
            return url[:index] + link
        else:
            log.msg("error link:" + link)
            return link

    def store_image(self, src, title):
        """
            Download *src* (up to 3 attempts, random User-Agent per try),
            keep a temp copy under /data/image/YYYYMMDD/, record the bytes
            and metadata in tb_image and return the new image_id. After
            all retries fail, a placeholder row (image_origin_available=0)
            is stored instead so the post can still be saved.
        """
        imageItem = ImageItem()
        imageItem["image_origin_url"] = src
        # Crude unique name: epoch seconds + last 8 chars of the url
        # (usually the file name, which keeps the extension).
        tmpfile = "/data/image/" + time.strftime('%Y%m%d') + "/" + str(long(time.time())) + src[-8:]
        tmpdir = os.path.dirname(tmpfile)
        if not os.path.exists(tmpdir):
            os.makedirs(tmpdir)
        for retry in range(3):
            try:
                request = urllib2.Request(src)
                request.add_header("User-Agent", random.choice(self.user_agent_list))
                uf = urllib2.urlopen(request)
                try:
                    bt = uf.read()
                finally:
                    uf.close()
                # "wb": image data is binary; text mode ("w") corrupts it
                # on platforms that translate newlines.
                f = open(tmpfile, "wb")
                try:
                    f.write(bt)
                finally:
                    f.close()
                image = Image.open(tmpfile)
                imageItem["image_origin_available"] = 1
                imageItem["use_origin_url"] = 0
                imageItem["image_bitmap"] = bt
                imageItem["image_size"] = len(bt)
                imageItem["image_width"] = image.size[0]
                imageItem["image_height"] = image.size[1]
                imageItem["image_title"] = title
                imageItem["image_mime_type"] = image.format
                imageItem["image_update_time"] = long(time.time())
                imageItem["image_final_url"] = ""
                break
            except Exception as e:
                log.msg("error while download image: " + src + "err msg"  + str(e))
                log.msg("retry download image: " + src + ", retry=" + str(retry))
                continue
        else:
            # Every retry failed: record a placeholder so the failure is
            # visible in the database.
            log.msg("can‘t download image: " + src)
            imageItem["image_origin_available"] = 0
            imageItem["use_origin_url"] = 0
            imageItem["image_bitmap"] = ""
            imageItem["image_size"] = 0
            imageItem["image_width"] = 0
            imageItem["image_height"] = 0
            imageItem["image_title"] = title
            imageItem["image_mime_type"] = ""
            imageItem["image_update_time"] = long(time.time())
            imageItem["image_final_url"] = ""
        return self.insertImage(imageItem)










