# -*-coding: utf-8 -*-
import requests
import datetime
from pyquery import PyQuery as pq
from image import async_crawl
from model import Gallery
from config import db


# Gallery list pages are numbered l_1.html, l_2.html, ...
URL_TEMPLATE = 'http://p.djys.com/meimo/l_{index}.html'
# Local path where a gallery's cover image is saved (one 0.jpg per gallery,
# grouped by crawl date).
COVER_PATH_TEMPLATE = '/data/crawler/{ymd}/{gallery_id}/0.jpg'


def YMD():
	'''Return the current date as YYYYMMDD (per-day crawl directory name).'''
	return datetime.datetime.now().strftime('%Y%m%d')


# Browser-like request headers so the site serves normal pages.
# NOTE(review): Cookie / If-Modified-Since / If-None-Match were captured from
# a real browser session and may expire -- refresh them if requests start
# returning stale or blocked responses.
headers = {
	'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
	'Accept-Encoding': 'gzip, deflate, sdch',
	'Accept-Language': 'zh-CN,zh;q=0.8',
	'Cache-Control': 'max-age=0',
	'Connection': 'keep-alive',
	'Cookie': 'safedog-flow-item=17ED8DA863E3FD968464121A30A083FF; bdshare_firstime=1465349930302; BDTUJIAID=1251abfcb4af114d801ce9ce6901fcb7; CNZZDATA4774081=cnzz_eid%3D1582629994-1465348976-%26ntime%3D1465348976',
	'Host': 'p.djys.com',
	'If-Modified-Since': 'Mon, 16 Nov 2015 07:47:55 GMT',
	'If-None-Match': '"80c7801a4320d11:846"',
	'Referer': 'http://p.djys.com/',
	# Must be a string: requests rejects non-str header values (was the int 1).
	'Upgrade-Insecure-Requests': '1',
	'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/50.0.2661.87 Safari/537.36',
}


def parse_li(e):
	'''
	Parse one <li> element from a gallery list page.

	Returns a dict with the keys:
		src   -- gallery cover image URL ('' if missing)
		href  -- gallery page URL ('' if missing)
		title -- gallery title ('' if missing)
	'''
	# Explicit dict instead of `return locals()`: locals() also leaked the
	# element `e` into the result and would silently pick up any future
	# temporary variable.
	anchor = e.cssselect('a')[0]
	cover = e.cssselect('a img')[0]
	return {
		'src': cover.attrib.get('src', ''),
		'href': anchor.attrib.get('href', ''),
		'title': anchor.attrib.get('title', ''),
	}


def get_all_items():
	'''
	Walk the paginated gallery list and collect every <li> element.

	Fetches l_1.html, l_2.html, ... until the site answers with its
	"page does not exist" marker (it returns an error page rather than a
	404 status), then returns the accumulated list of li nodes.
	'''
	all_lis = []
	index = 1
	while True:
		response = requests.get(URL_TEMPLATE.format(index=index), headers=headers)
		# Past the last page the site serves an error page containing this
		# marker text instead of a non-200 status.
		if response.content.find('指定的页面不存在') >= 0:
			break
		html_obj = pq(response.content)
		all_lis.extend(html_obj('ul.picl li'))
		index += 1
		# BUGFIX: removed an unconditional `break` (debug leftover) that made
		# the loop stop after page 1, leaving `index += 1` and the marker
		# check dead; also dropped the duplicate debug `print index` lines.
	return all_lis


if __name__ == '__main__':
	# Crawl every gallery list page, persist each gallery to the DB, and
	# queue its cover image for asynchronous download.
	all_items = get_all_items()

	for item in all_items:
		info = parse_li(item)
		print info
		# Persist first: gallery.id (assigned by the DB layer) is needed to
		# build the local cover path below.
		gallery = Gallery.add(source='p.djys.com',href=info.get('href'),title=info.get('title', ''), original_src=info.get('src', ))

		gallery.cover = COVER_PATH_TEMPLATE.format(ymd=YMD(), gallery_id=gallery.id)
		db.session.commit()
		# NOTE(review): commit happens before async_crawl -- presumably so the
		# gallery row is visible when the async worker runs; confirm against
		# the image.async_crawl implementation.
		async_crawl(info.get('src'), gallery.cover)


