# -*- coding: utf-8 -*-
import sys
import os
ROOT_DIR = os.path.dirname(os.path.dirname(__file__))
sys.path.append(ROOT_DIR)
import queue
import threading
import re
import requests
import time
from pymongo import MongoClient

from src.util import ThreadedSafetySet, refresh_login, load_session

# Python 2 ships `cookielib`; Python 3 renamed it to `http.cookiejar`.
# Only ImportError may be swallowed here — the original bare `except:`
# would also hide SystemExit/KeyboardInterrupt and real import-time bugs.
try:
    import cookielib
except ImportError:
    import http.cookiejar as cookielib

# Shared MongoDB handle used by Worker/Boss/RetryFail below.
db = MongoClient('localhost', 27017).zhihu_sample_database
# db = MongoClient('localhost', 27017).zhihu_test_db

class BadRequestError(Exception):
    """Raised when an HTTP request fails (non-200 status, network error,
    or unparsable body). Carries the URL, the error text, and whatever
    response body was received so failures can be persisted for retry."""

    def __init__(self, url, error, resp_body, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Keep the full failure context on the exception instance.
        self.url = url
        self.error = error
        self.resp_body = resp_body

    def __str__(self):
        return f'url:{self.url}, error:{self.error}, resp_body:{self.resp_body}'


class Worker(threading.Thread):
    """Crawler thread: pulls (url, url_token, user_id) tasks off a shared
    queue, fetches the answers-page JSON, stores it in MongoDB, and enqueues
    the next page until pagination ends. Failures are recorded in
    `db.fail_url_token` for a later retry pass (see RetryFail)."""

    def __init__(self, thread_id, queue, completed_user_id_container, work_sleep=4):
        """
        thread_id: label used only for logging.
        queue: queue.Queue of (url, url_token, user_id) tasks.
        completed_user_id_container: thread-safe set of finished url_tokens.
        work_sleep: seconds to sleep after each successful page (rate limit).
        """
        super().__init__()
        print(thread_id, 'start')
        self.thread_id = thread_id
        self.queue = queue
        self.work_sleep = work_sleep
        self.completed_user_id_container = completed_user_id_container
        self.session = load_session()

    def do_get(self, url, headers=None, timeout=10):
        """GET `url` and return the parsed JSON body.

        Raises BadRequestError on any failure (non-200 status, network
        error, invalid JSON), carrying the response body when one exists.
        """
        req_headers = {
            "Host": "www.zhihu.com",
            'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.2924.87 Safari/537.36'
        }
        req_headers.update((headers or {}))
        resp = None
        try:
            resp = self.session.get(url, headers=req_headers, timeout=timeout)
            if resp.status_code != 200:
                raise Exception(resp.status_code)
            return resp.json()
        except Exception as e:
            # BUG FIX: `if resp` is False for 4xx/5xx responses
            # (requests.Response.__bool__ reflects `ok`), which discarded the
            # body of exactly the responses we want to record. Test identity.
            body = resp.content.decode('utf-8') if resp is not None else 'not_resp'
            raise BadRequestError(url, str(e), body)

    def run(self):
        # Poll the queue forever; the thread is started as a daemon, so it
        # dies with the process.
        while 1:
            try:
                task = self.queue.get_nowait()
            except queue.Empty:
                time.sleep(1)
                continue
            try:
                self.scrap(*task)
            finally:
                # BUG FIX: task_done() must run even if scrap() raises an
                # unexpected exception, or the queue's unfinished-task count
                # leaks (and the dying thread took the task with it silently).
                self.queue.task_done()

    def scrap(self, url, url_token, user_id):
        """Fetch one answers page, persist it, and schedule the next page.

        On BadRequestError the token is marked completed (so the batch can
        finish) and the failure is stored for RetryFail to pick up.
        """
        # BUG FIX: guard against URLs with no '&offset=' — the old code
        # crashed the thread with AttributeError before entering the try.
        offset_token = re.search(r'&offset=(.*)', url)
        offset = offset_token.group(1) if offset_token else ''
        try:
            json_resp = self.do_get(url, headers={'Referer': 'https://www.zhihu.com/people/' + url_token + '/answers'})
            # Tag the raw API document with crawl metadata before storing.
            json_resp['offset_num'] = offset
            json_resp['user_token'] = url_token
            json_resp['user_id'] = user_id

            db.answers.insert_one(json_resp)

            is_end = json_resp['paging']['is_end']
            next_page = json_resp['paging']['next']
            if not is_end:
                self.queue.put((next_page, url_token, user_id))
            else:
                self.completed_user_id_container.add(url_token)
            # Rate-limit between successful requests.
            time.sleep(self.work_sleep)
        except BadRequestError as e:
            # Count the user as done so Boss doesn't wait forever, but keep
            # the failure so RetryFail can re-attempt it later.
            self.completed_user_id_container.add(url_token)
            db.fail_url_token.insert_one({'url_token': url_token, 'error': e.error, 'user_id': user_id, 'url': e.url, 'resp_body': e.resp_body})
            print(e)


class Boss(object):
    """Orchestrates the crawl: reads user ids from MongoDB in bulks, turns
    them into start URLs, feeds them to a pool of Worker threads, and waits
    for each batch to complete before fetching the next one."""

    def __init__(self, offset=0, bulk_size=1000, worker_count=10, check_process_interval=10, limit=0):
        """
        offset: row index in db.user_id to start from (resume point).
        bulk_size: users per batch.
        worker_count: number of Worker threads.
        check_process_interval: seconds between batch-progress checks.
        limit: total rows to process; 0 means no limit. Must be a multiple
               of bulk_size so batches line up with the limit check.
        """
        self.offset = offset
        self.worker_count = worker_count
        self.bulk_size = bulk_size
        self.completed_user_id_container = ThreadedSafetySet()
        self.check_process_interval = check_process_interval
        self.completed_count = 0
        self.limit = limit
        self.row_index = 0
        print('offset:%s, bulk_size:%s, limit:%s' % (self.offset, self.bulk_size, self.limit))
        if self.limit % self.bulk_size != 0:
            raise Exception('limit must be integer multiple of bulk_size')

    def iter_batch_tasks(self, offset, bulk_size):
        """Yield batches of task dicts {'idp', 'url_token'}, one batch per
        bulk of user_id rows, until the collection (or `limit`) is exhausted."""
        self.row_index = offset
        while 1:
            user_id_rows = list(db.user_id.find(
                filter={}, projection={'idp': 1, '_id': 0},
                skip=self.row_index, limit=bulk_size, sort=[("idp", 1)]))
            print('----query %s uids' % len(user_id_rows))
            if not user_id_rows or (self.limit and (self.row_index - offset) == self.limit):
                break
            self.row_index += len(user_id_rows)

            idp_list = [r['idp'] for r in user_id_rows]
            tasks = {}
            rows = db.user_info.find(filter={"idp": {"$in": idp_list}}, projection={'idp': 1, 'url_token': 1, '_id': 0})
            for row in rows:
                # Keyed by url_token so duplicate tokens collapse to one task.
                tasks[row['url_token']] = {'idp': row['idp'], 'url_token': str(row['url_token'])}
            print('----get %s tasks' % len(tasks))
            # Materialize to a list so callers can take len() and iterate twice.
            yield list(tasks.values())

    def add_one_worker(self, worker_list, worker_queue):
        """Spawn one daemon Worker on `worker_queue` and track it."""
        one_worker = Worker(str(time.time()), worker_queue, self.completed_user_id_container)
        one_worker.daemon = True
        one_worker.start()
        worker_list.append(one_worker)

    def start_work(self):
        """Main loop: start the worker pool, then for each batch enqueue the
        first answers-page URL per user and block until the whole batch is
        marked complete (restarting any workers that died along the way)."""
        worker_queue = queue.Queue()
        worker_list = []
        for _ in range(self.worker_count):
            self.add_one_worker(worker_list, worker_queue)
        last_batch_completed_count = 0
        for batch_tasks in self.iter_batch_tasks(self.offset, self.bulk_size):
            batch_tasks_length = len(batch_tasks)
            for t in batch_tasks:
                url_token = t['url_token']
                start_url = 'https://www.zhihu.com/api/v4/members/' + url_token + '/answers?include=data%5B*%5D.is_normal%2Cis_collapsed%2Ccollapse_reason%2Csuggest_edit%2Ccomment_count%2Ccan_comment%2Ccontent%2Cvoteup_count%2Creshipment_settings%2Ccomment_permission%2Cmark_infos%2Ccreated_time%2Cupdated_time%2Crelationship.is_authorized%2Cvoting%2Cis_author%2Cis_thanked%2Cis_nothelp%2Cupvoted_followees%3Bdata%5B*%5D.author.badge%5B%3F(type%3Dbest_answerer)%5D.topics&limit=20&offset=0'
                worker_queue.put((start_url, url_token, t['idp']))

            st = time.time()
            while 1:
                try:
                    print('----in time completed row_index:%s,%s' % ((last_batch_completed_count + len(self.completed_user_id_container)), self.row_index))
                    if len(self.completed_user_id_container) == batch_tasks_length:
                        self.completed_count += len(self.completed_user_id_container)
                        last_batch_completed_count = self.completed_count
                        print('--completed one batch:%s, cost:%s' % (batch_tasks_length, (time.time() - st)))
                        self.completed_user_id_container.empty()
                        break
                    # Replace any workers that died so the batch can still
                    # finish (NOTE: a task a worker died holding is lost).
                    dead_thread = 0
                    alive_worker_list = []
                    for w in worker_list:
                        if w.is_alive():
                            alive_worker_list.append(w)
                        else:
                            dead_thread += 1
                    worker_list = alive_worker_list
                    for _ in range(dead_thread):
                        self.add_one_worker(worker_list, worker_queue)
                    time.sleep(self.check_process_interval)
                except KeyboardInterrupt:
                    print('terminated by KeyboardInterrupt')
                    # Roll back the partial batch so a re-run starts clean.
                    token_list = [t['url_token'] for t in batch_tasks]
                    db.answers.delete_many({"user_token": {"$in": token_list}})
                    # BUG FIX: fail_url_token documents store the token under
                    # 'url_token' (see Worker.scrap); the old filter used
                    # 'user_token' and therefore deleted nothing.
                    db.fail_url_token.delete_many({"url_token": {"$in": token_list}})
                    print('----completed:%s, completed_row_index:%s' % (self.completed_count, (self.row_index - self.bulk_size)))
                    sys.exit(0)

        print('----completed:%s, completed_row_index:%s' % (self.completed_count, self.row_index))


class RetryFail(object):
    """Second-pass crawler: re-issues every request recorded in
    db.fail_url_token, stores successful pages in db.answers, and removes
    the failure record once the user's pagination is fully consumed."""

    def __init__(self):
        self.session = load_session()

    def retry_request(self, i):
        """Retry one failed request `i` ({'url', 'url_token', 'user_id', '_id'}),
        following pagination iteratively until is_end or an error.

        A 404 means the user/answer list is gone: drop the failure record.
        Any other exception is logged and the record is kept for a later run.
        """
        item = dict(i)
        # Iterative pagination (the original recursed per page, which could
        # exhaust the recursion limit on users with many answer pages).
        while 1:
            try:
                resp = self.session.get(item['url'], headers={
                    "Host": "www.zhihu.com",
                    'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.2924.87 Safari/537.36',
                    'Referer': 'https://www.zhihu.com/people/' + item['url_token'] + '/answers'
                })
                if resp.status_code == 404:
                    db.fail_url_token.delete_one({"_id": item["_id"]})
                    print('[404] %s' % item['url'])
                    return
                json_resp = resp.json()
                offset_token = re.search(r'&offset=(.*)', item['url'])
                # Same metadata key as Worker.scrap ('offset_num'; the old
                # code wrote 'offset', leaving the collection inconsistent).
                json_resp['offset_num'] = offset_token.group(1) if offset_token else ''
                json_resp['user_token'] = item['url_token']
                json_resp['user_id'] = item['user_id']
                # BUG FIX: insert the enriched json_resp — the old code
                # inserted a second, bare resp.json(), silently dropping the
                # offset/user_token/user_id fields just attached.
                db.answers.insert_one(json_resp)
                print('[OK] %s' % item['url'])

                if json_resp['paging']['is_end']:
                    db.fail_url_token.delete_one({"_id": item["_id"]})
                    return
                # Follow pagination with the same failure record.
                item['url'] = json_resp['paging']['next']
            except Exception as e:
                # Best-effort: keep the record and move on to the next one.
                print('----------[PROBLEM] %s, err:%s' % (item['url'], str(e)))
                return

    def run(self):
        """Retry every recorded failure once, in insertion order."""
        failures = list(db.fail_url_token.find())
        for failure in failures:
            self.retry_request(dict(failure))

if __name__ == '__main__':
    # Entry point. Uncomment refresh_login() to renew the cached session
    # first, or one of the Boss(...) lines to run a fresh crawl from the
    # given offset; by default only the retry pass over previously failed
    # requests is executed.
    # refresh_login()
    # Boss(bulk_size=1000, offset=218001).start_work()
    # Boss(bulk_size=1, offset=200000, limit=1).start_work()
    RetryFail().run()