import requests
from faker import Faker
from lxml import etree
import json
import math
import time
import random

# Shared request headers: a fixed desktop-Chrome UA so Meituan serves the
# normal HTML page, and an Accept hint for the JSON comment API.
header = {
	'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.163 Safari/537.36',
	'Accept': 'application/json',
}
def get_message(url):
	"""Fetch a Meituan shop page and return its embedded app-state dict.

	Args:
		url: full URL of a Meituan shop page.

	Returns:
		The parsed JSON object extracted by get_json() from the page's
		inline script payload.
	"""
	# timeout so a stalled connection cannot hang the scraper forever
	html = requests.get(url, headers=header, timeout=10).content.decode("utf8")
	jxxi = etree.HTML(html)
	# NOTE(review): the payload is assumed to be in the 6th inline <script>;
	# this index is brittle and breaks if Meituan changes the page layout.
	jx = jxxi.xpath("//script/text()")[5]
	return get_json(jx)

def get_json(jx):
	hide_str = jx+""
	hide_dict = hide_str[19:-1]
	hide_json = json.loads(hide_dict)
	return hide_json

def parse_four(shopId, allCommentNum):
	"""Scrape every tag and comment for one Meituan shop.

	Pages through the merchant-comment API 10 comments at a time,
	sleeping a random 1-3 s between requests to avoid rate limiting.

	Args:
		shopId: numeric Meituan shop id.
		allCommentNum: total comment count reported for the shop; used
			to compute the number of 10-comment pages to fetch.

	Returns:
		[tags, comments_list] where tags is a list of
		{'tag': str, 'count': int} dicts (taken from the first page only)
		and comments_list is a list of
		{'comment': str, 'commentTime': ...} dicts.
	"""
	tags = []
	comments_list = []
	# ceil(total / 10) pages of 10 comments each
	all_page = int(math.ceil(float(allCommentNum) / 10))
	for page in range(all_page):
		url = "https://www.meituan.com/meishi/api/poi/getMerchantComment?uuid=57ff7157f23a4631b6bf.1602837341.1.0.0&platform=1&partner=126&originUrl=http://www.meituan.com/meishi/"+str(shopId)+"/&riskLevel=1&optimusCode=10&id="+str(shopId)+"&userId=&offset="+str(page*10)+"&pageSize=10&sortType=1"
		print("scrapying page in:"+str(page)+"/"+str(all_page)+" at "+str(shopId))
		# random delay between requests to look less like a bot
		time.sleep(random.uniform(1, 3))
		# timeout so one stalled request cannot hang the whole scrape
		data = json.loads(requests.get(url=url, headers=header, timeout=10).text)['data']
		# tags are identical on every page, so collect them once
		if page == 0:
			for t in data['tags']:
				tags.append({'tag': t['tag'], 'count': t['count']})
		comments = data['comments']
		# the API returns null comments past the last real page
		if comments is None:
			break
		for comment in comments:
			comments_list.append({'comment': comment['comment'], 'commentTime': comment['commentTime']})
	return [tags, comments_list]