# -*- coding: utf-8 -*-
# filename: handle.py

import hashlib
import time
from multiprocessing import Queue, Process
from scrapy.settings import Settings
from alimama.spiders.taobaoalmama import TaobaoalmamaSpider
import receive
import reply
from twisted.protocols.amp import Argument
import web
from twisted.internet import reactor
from scrapy.crawler import CrawlerRunner
from scrapy.utils.log import configure_logging


class Handle(object):
    """web.py handler for WeChat public-account callbacks.

    GET  -- server URL-verification handshake: check ``signature`` and echo
            ``echostr`` back verbatim on success.
    POST -- incoming message push: for text messages, run the Taobao spider
            in a child process and reply with the scraped result.
    """

    # Token configured on the WeChat admin console ("基本配置" page);
    # must match what was entered there for signature checks to pass.
    TOKEN = "gxzrzzq"

    def GET(self):
        """Answer WeChat's URL-verification request.

        WeChat sends signature/timestamp/nonce/echostr; we must return
        echostr iff sha1 of the lexicographically sorted concatenation of
        (token, timestamp, nonce) equals the signature.
        """
        try:
            data = web.input()
            if not data:
                return "hello, this is handlessss view"
            signature = data.signature
            timestamp = data.timestamp
            nonce = data.nonce
            echostr = data.echostr

            # Exactly the algorithm WeChat documents: sort, join, sha1.
            # (was: a local named `list`, shadowing the builtin)
            parts = sorted([self.TOKEN, timestamp, nonce])
            joined = ''.join(parts)
            print(joined)
            hashcode = hashlib.sha1(joined.encode('utf-8')).hexdigest()
            print("handle/GET func: hashcode, signature: ", hashcode, signature, data)
            return echostr if hashcode == signature else ""
        except Exception as e:
            # Was `return Argument` -- returned a *class* to WeChat.
            # An empty body simply makes the verification fail.
            print("handle/GET failed: ", e)
            return ""

    def POST(self):
        """Handle an incoming WeChat message push.

        For text messages, launch the spider in a child process (scrapy's
        twisted reactor cannot be restarted within one process, so each
        request gets a fresh process) and reply with the scraped items.
        """
        try:
            webData = web.data()
            print("Handle Post webdata is ", webData)
            # 后台打日志
            recMsg = receive.parse_xml(webData)
            # The reply swaps roles: our "to" is the sender's "from".
            toUser = recMsg.FromUserName
            fromUser = recMsg.ToUserName
            if isinstance(recMsg, receive.Msg) and recMsg.MsgType == 'text':
                # Only text messages carry Content -- the old code read it
                # before this check and crashed on non-text pushes.
                text = recMsg.Content.decode("UTF-8")
                print('10100101'+text)
                # 启动爬虫,加入队列和线程数据共享
                print('在启动爬虫001')
                q = Queue(9)
                p = Process(target=run_proc, args=(text, q))
                t0 = time.time()
                p.start()
                # Blocks until the child posts its items (or the exception
                # run_proc caught).  NOTE(review): hangs forever if the
                # child dies before q.put -- consider q.get(timeout=...).
                result = q.get()
                p.join()  # reap the child; previously left as a zombie
                t1 = time.time()
                print(t1 - t0)
                if result is not None:
                    print('-----奇怪之处：'+str(result))
                content = result
            else:
                print("暂且不处理")
                content = "暂且不处理"
            replyMsg = reply.TextMsg(toUser, fromUser, content)
            return replyMsg.send()
        except Exception as e:
            print("handle/POST failed: ", e)
            # "success" tells WeChat not to keep re-delivering the push
            # (was `return Argument`, a class object).
            return "success"


def run_proc(args, q):
    """Child-process entry point: crawl Taobao for *args* and report back.

    Pushes the spider's collected items onto *q* on success, or the raised
    exception on failure, so the parent's blocking ``q.get()`` always
    receives something.
    """
    try:
        crawl_settings = Settings({
            'ITEM_PIPELINES': {
                'alimama.pipelines.AlimamaPipeline': 300,
            }
        })
        configure_logging({'LOG_FORMAT': '%(levelname)s: %(message)s'})
        # The spider reads its search term from a class attribute.
        TaobaoalmamaSpider.searchkey = args
        deferred = CrawlerRunner(crawl_settings).crawl(TaobaoalmamaSpider)
        # Stop the reactor once the crawl ends (success or failure) so
        # reactor.run() below returns control to this function.
        deferred.addBoth(lambda _: reactor.stop())
        reactor.run()
        q.put(TaobaoalmamaSpider.items)
    except Exception as exc:
        # Hand the failure to the parent instead of leaving it blocked.
        q.put(exc)

