# -*- coding:utf-8 -*-
import re
import json
import requests
from requests.exceptions import RequestException


# Desktop-browser User-Agent sent with every request so maoyan.com serves
# the normal HTML page instead of rejecting the default python-requests client.
headers = {
	'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/62.0.3202.75 Safari/537.36',
}


def get_one_page(url):
	"""Fetch *url* and return the response body as text.

	Returns None on any network failure or non-200 status code so the
	caller can simply skip the page instead of handling exceptions.
	"""
	try:
		# A timeout is essential for a scraper: without it, a dead or
		# throttling host blocks the whole run indefinitely.
		response = requests.get(url, headers=headers, timeout=10)
		if response.status_code == 200:
			return response.text
		return None
	except RequestException:
		# Best-effort by design: treat connection/timeout errors as "no page".
		return None


def parse_one_page(page_strings):
	"""Yield one dict per movie entry found in a Maoyan board HTML page.

	Each yielded dict has string values for the keys:
	index, image, title, actor, time, score.
	"""
	# Raw strings: the original non-raw '\d' is an invalid escape sequence
	# (DeprecationWarning, and a SyntaxError in future Python versions).
	pattern = re.compile(r'<dd>.*?board-index.*?>(\d+)</i>'
						 r'.*?data-src="(.*?)"'
						 r'.*?name"><a.*?>(.*?)</a>'
						 r'.*?star">(.*?)</p>'
						 r'.*?releasetime">(.*?)</p>'
						 r'.*?integer">(.*?)</i>'
						 r'.*?fraction">(.*?)</i>.*?</dd>', re.S)

	for index, image, title, star, release, integer, fraction in pattern.findall(page_strings):
		yield {
			'index': index,
			'image': image,
			'title': title,
			# [3:] drops the leading 3-char label (presumably "主演：") after stripping.
			'actor': star.strip()[3:],
			# [5:] drops the leading 5-char label (presumably "上映时间：").
			'time': release.strip()[5:],
			# Score is split in the page markup into "9." + "5" → "9.5".
			'score': integer + fraction,
		}

def write_to_file(content):
	"""Append *content* as one JSON object per line to result.txt.

	ensure_ascii=False keeps non-ASCII movie titles human-readable in the
	output file; utf-8 is forced so the result does not depend on the
	platform's default encoding.
	"""
	with open('result.txt', 'a', encoding='utf-8') as f:
		# The `with` block already closes the file; the original's explicit
		# f.close() inside it was redundant.
		f.write(json.dumps(content, ensure_ascii=False) + '\n')


def main(offset=0):
	"""Scrape one page of the Maoyan top-100 board and persist each movie.

	offset: pagination offset appended to the board URL (0, 10, 20, ...).
	"""
	# BUG FIX: the original built '...?offset<N>' (missing '='), so the
	# query parameter was ignored and every call re-fetched the first page.
	url = 'http://maoyan.com/board/4?offset=' + str(offset)
	doc_data = get_one_page(url)
	if doc_data is None:
		# Network failure or non-200 response: skip this page instead of
		# crashing inside the regex parser with a None argument.
		return
	for item in parse_one_page(doc_data):
		print(item)
		write_to_file(item)

if __name__ == '__main__':
	# The board paginates 10 entries per page; offsets 0..90 cover the top 100.
	for offset in range(0, 100, 10):
		main(offset)



